operands could not be broadcast together with shapes (337,451) (225,301) - Printable Version (Python Forum: https://python-forum.io/thread-35786.html)
operands could not be broadcast together with shapes (337,451) (225,301) - kevinabbot - Dec-14-2021

I am simply trying to take the accumulated snow at the 4th forecast hour and subtract the accumulated snow at the 3rd forecast hour to get the snowfall for just the past hour, but it says the array sizes don't match (I am new to Python). That doesn't make sense to me, because it is the same numerical weather prediction model, just at forecast hour +1; in this case I am subtracting the 4th-hour snowfall from the 5th-hour snowfall, and it gives me this error. I tried the x.dot(y) idea, but that just gave me another error. Is there any way to make this work?

Error message: https://i.stack.imgur.com/B7xc7.png

```python
############# Imports
from datetime import datetime, timedelta
import requests
import metpy.calc as mpcalc
from metpy.units import units
import urllib.request
from zipfile import ZipFile
import xarray as xr
import matplotlib
import matplotlib.pyplot as plt
from shapely import geometry
import numpy as np
import fiona

# Load data
out_file = '/home/jupyter-kfa5169/FDI/FDI03.shp'

########### Time Stamps
run_dt = datetime.utcnow() - timedelta(hours=1)  # previous hour
fhr = '03'
fhr1 = '04'
run = run_dt.strftime('%H')
ymd = run_dt.strftime('%Y%m%d')

########## Wind Data Import
# Data download
url = 'https://nomads.ncep.noaa.gov/cgi-bin/filter_rap.pl?file=rap.t{run}z.awp252pgrbf{fhr}.grib2&lev_surface=on&var_ASNOW=on&var_FRZR=on&var_GUST=on&var_PRATE=on&var_VIS=on&leftlon=0&rightlon=360&toplat=90&bottomlat=-90&dir=%2Frap.{ymd}'
URL = url.format(run=run, fhr=fhr, ymd=ymd)
r = requests.get(URL)
with open('/home/jupyter-kfa5169/FDI/wind.grib03', 'wb') as fh:
    fh.write(r.content)

ds = xr.open_dataset('/home/jupyter-kfa5169/FDI/wind.grib03', engine='cfgrib')
lon, lat = ds.longitude.values - 360., ds.latitude.values
gust = mpcalc.smooth_gaussian((ds.variables['gust'].values * units('m/s')).to('mph').magnitude, 4)
visab = mpcalc.smooth_gaussian((ds.variables['vis'].values * units('m')).to('mile').magnitude, 4)
snow00 = mpcalc.smooth_gaussian((ds.variables['asnow'].values * units('m')).to('inch').magnitude, 4)
prate = ds.variables['prate']
prate = prate * 141.732
#vshear = ds.variables['vvcsh']
#ushear = ds.variables['vucsh']

################## Makes The Levels for Visibility
nvis = np.copy(visab)
vis = np.ones(nvis.shape)
vis = np.where(nvis <= 1.0, 3, vis)
vis = np.where(nvis <= 0.5, 5, vis)
vis = np.where(nvis <= 0.1, 8, vis)
vis = np.where(nvis <= 0.04, 14, vis)

################## Makes The Levels for Gusts
ngust = np.copy(gust)
gst = np.ones(ngust.shape)
gst = np.where(ngust >= 40, 3, gst)
gst = np.where(ngust >= 50, 6, gst)
gst = np.where(ngust >= 60, 15, gst)

################## Make the rainfall rates
nrate = np.copy(prate)
rr = np.ones(nrate.shape)
rr = np.where(nrate >= 0.1, 2, rr)
rr = np.where(nrate >= 0.3, 3, rr)
rr = np.where(nrate >= 0.5, 6, rr)
rr = np.where(nrate >= 1.0, 12, rr)

#################################
url = 'https://nomads.ncep.noaa.gov/cgi-bin/filter_rap.pl?file=rap.t{run}z.awp130pgrbf{fhr1}.grib2&lev_surface=on&var_ASNOW=on&var_FROZR=on&var_FRZR=on&leftlon=0&rightlon=360&toplat=90&bottomlat=-90&dir=%2Frap.{ymd}'
URL = url.format(run=run, fhr1=fhr1, ymd=ymd)
r = requests.get(URL)
with open('/home/jupyter-kfa5169/FDI/wind.grib04', 'wb') as fh:
    fh.write(r.content)

ds1 = xr.open_dataset('/home/jupyter-kfa5169/FDI/wind.grib04', engine='cfgrib')
asnow = mpcalc.smooth_gaussian((ds1.variables['asnow'].values * units('m')).to('inch').magnitude, 4)
asnow = asnow - snow00
frz = ds1.variables['frzr']
frz = frz * 0.0393701

############# SNOW ######################
nsnow = np.copy(asnow)
sn = np.ones(nsnow.shape)
sn = np.where(nsnow >= 0.5, 5, sn)
sn = np.where(nsnow >= 1.0, 8, sn)
sn = np.where(nsnow >= 1.5, 15, sn)

############# Freezing Rain ################
nfrz = np.copy(frz)
fr = np.ones(nfrz.shape)
fr = np.where(nfrz > 0.02, 6, fr)
fr = np.where(nfrz >= 0.05, 12, fr)
fr = np.where(nfrz >= 0.1, 20, fr)

############################# PLOT ####################
fig = plt.figure(figsize=(12, 10))
ax = fig.add_subplot(111)
final = rr + gst + vis + fr + sn
cs = ax.contourf(lon, lat, final, levels=[6, 8, 10, 12, 14, 16, 18, 20, 22, 26])  ## EDIT CONTOUR LEVELS HERE
plt.colorbar(cs)
plt.show()

############################# Saving As Shapefile ##############################
# Write contours to shapefile
lvl_lookup = dict(zip(cs.collections, cs.levels))
PolyList = []
for col in cs.collections:
    z = lvl_lookup[col]  # the value of this level
    for contour_path in col.get_paths():
        # create the polygon for this level
        for ncp, cp in enumerate(contour_path.to_polygons()):
            lons = cp[:, 0]
            lats = cp[:, 1]
            new_shape = geometry.Polygon([(i[0], i[1]) for i in zip(lons, lats)])
            poly = new_shape  # first shape
            PolyList.append({'poly': poly, 'props': {'z': z}})

schema = {'geometry': 'Polygon', 'properties': {'z': 'float'}}
crs = {'no_defs': True, 'ellps': 'WGS84', 'datum': 'WGS84', 'proj': 'longlat'}
with fiona.collection('/home/jupyter-kfa5169/FDI/FDI03.shp', 'w',
                      driver='ESRI Shapefile', crs=crs, schema=schema) as output:
    for p in PolyList:
        output.write({'properties': p['props'], 'geometry': geometry.mapping(p['poly'])})

# create a ZipFile object
rap_temp = ZipFile('/home/jupyter-kfa5169/FDI/FDI03.zip', 'w')
# Add multiple files to the zip
rap_temp.write('/home/jupyter-kfa5169/FDI/FDI03.cpg')
rap_temp.write('/home/jupyter-kfa5169/FDI/FDI03.dbf')
rap_temp.write('/home/jupyter-kfa5169/FDI/FDI03.prj')
rap_temp.write('/home/jupyter-kfa5169/FDI/FDI03.shp')
rap_temp.write('/home/jupyter-kfa5169/FDI/FDI03.shx')
# Close the zip file
rap_temp.close()

#######################################
# Arcgis
```
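For reference, the shapes in the error message line up with the two downloads in the code above: the first filter URL requests an awp252 file while the second requests an awp130 file, so `snow00` and `asnow` likely sit on two different RAP output grids, which is exactly the mismatch NumPy reports. Below is a minimal, self-contained sketch of the failure mode; the shapes are copied from the error message, and the grid labels in the comments are assumptions inferred from the URLs rather than values read from the actual GRIB files.

```python
# Minimal sketch: NumPy elementwise arithmetic requires the operands to have
# the same shape (or shapes that broadcast). The shapes below mirror the ones
# in the error message; the arrays are placeholders, not real RAP data.
import numpy as np

asnow = np.zeros((337, 451))   # assumed: field from the awp130 (13-km) grid
snow00 = np.zeros((225, 301))  # assumed: field from the awp252 (20-km) grid

print(asnow.shape, snow00.shape)

try:
    hourly = asnow - snow00    # raises the "could not be broadcast" ValueError
except ValueError as err:
    print(err)

# If both forecast hours are pulled from the same grid product (the same
# awpXXX file), the shapes match and the subtraction is a plain elementwise
# difference, giving accumulated snow over the last hour at every grid point.
snow00_same_grid = np.zeros((337, 451))
hourly = asnow - snow00_same_grid
print(hourly.shape)            # (337, 451)
```

Printing `ds.variables['asnow'].shape` and `ds1.variables['asnow'].shape` right after each `xr.open_dataset` call would confirm which grid each file is on. Note that `x.dot(y)` is matrix multiplication rather than an elementwise difference, so it would not produce an hourly snowfall field even when the shapes are compatible.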