Python-IrradPy

I want to run the clear sky model, but I am having a problem with an integrity check failure

jo-omoyele opened this issue 1 year ago • 1 comment

```python
import irradpy
import multiprocessing
```

Important note: if you're using Windows, make sure to wrap the call in an `if __name__ == "__main__":` guard.

if name == "main": multiprocessing.freeze_support() irradpy.downloader.run(auth={"uid":"o_olalekan", "password": "###"})

Download All Data From 2018-01-01 To 2018-01-02

```python
irradpy.downloader.run(
    auth={"uid": "o_olalekan", "password": "###"},
    initial_year=2020, final_year=2020,
    initial_month=1, final_month=1,
    initial_day=1, final_day=2,
    lat_1=-90, lat_2=90, lon_1=-180, lon_2=180,
    verbose=True, thread_num=20, connection_num=2
)
```

Run the clear sky model from 2018-01-01 to 2018-01-02

```python
import numpy as np

time_delta = 10  # minutes
timedef = [('2018-01-01T00:00:00', '2018-01-02T00:00:00')]
time = irradpy.model.timeseries_builder(timedef, time_delta, np.size(latitudes))
irradpy.model.ClearSkyREST2v5(latitudes, longitudes, elevations, time, dataset_dir).REST2v5()
```
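For context, `latitudes`, `longitudes`, `elevations`, and `dataset_dir` are defined before the snippet above; a minimal sketch of what they could look like (placeholder values and an assumed dataset path, not the actual configuration):

```python
# Placeholder inputs for the REST2v5 call above; the values and the dataset
# directory are assumptions for illustration only.
import numpy as np

latitudes = np.array([45.0])     # site latitude(s) in degrees
longitudes = np.array([-73.6])   # site longitude(s) in degrees
elevations = np.array([30.0])    # site elevation(s) in metres
dataset_dir = "MERRA2_data"      # assumed path to the downloaded/merged MERRA-2 files
```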

```
INFO:root:Downloading data from 2023-4-22 to 2023-4-22...
INFO:root:Request Already Exist in Download Directory, Adding More Files...
INFO:root:---- Begin Analysing Directory ----
ERROR:root:* Found previous corrupted file MERRA2_400.tavg1_2d_rad_Nx.20230422.nc4.nc?ALBEDO[0:23][0:360][0:575],CLDTOT[0:23][0:360][0:575],SWGDN[0:23][0:360][0:575],SWGDNCLR[0:23][0:360][0:575],TAUTOT[0:23][0:360][0:575],time,lat[0:360],lon[0:575], Scheduled for redownload
INFO:root:---- End Analysing Directory ----
INFO:root:Preparing new file MERRA2_400.tavg1_2d_rad_Nx.20230422.nc4.nc?ALBEDO[0:23][0:360][0:575],CLDTOT[0:23][0:360][0:575],SWGDN[0:23][0:360][0:575],SWGDNCLR[0:23][0:360][0:575],TAUTOT[0:23][0:360][0:575],time,lat[0:360],lon[0:575] from M2T1NXRAD.5.12.4
INFO:root:---- Begin Download ----
CRITICAL:root:% New File from Date 2023-04-22 Integrity Check Failed
INFO:root:---- Download Finished ----
INFO:root:---- Begin Analysing Directory ----
ERROR:root:* Found previous corrupted file MERRA2_400.tavg1_2d_slv_Nx.20230422.nc4.nc?TQV[0:23][0:360][0:575],TO3[0:23][0:360][0:575],PS[0:23][0:360][0:575],time,lat[0:360],lon[0:575], Scheduled for redownload
INFO:root:---- End Analysing Directory ----
INFO:root:Preparing new file MERRA2_400.tavg1_2d_slv_Nx.20230422.nc4.nc?TQV[0:23][0:360][0:575],TO3[0:23][0:360][0:575],PS[0:23][0:360][0:575],time,lat[0:360],lon[0:575] from M2T1NXSLV.5.12.4
INFO:root:---- Begin Download ----
CRITICAL:root:% New File from Date 2023-04-22 Integrity Check Failed
INFO:root:---- Download Finished ----
INFO:root:---- Begin Analysing Directory ----
ERROR:root:* Found previous corrupted file MERRA2_400.tavg1_2d_aer_Nx.20230422.nc4.nc?TOTSCATAU[0:23][0:360][0:575],TOTEXTTAU[0:23][0:360][0:575],TOTANGSTR[0:23][0:360][0:575],time,lat[0:360],lon[0:575], Scheduled for redownload
INFO:root:---- End Analysing Directory ----
INFO:root:Preparing new file MERRA2_400.tavg1_2d_aer_Nx.20230422.nc4.nc?TOTSCATAU[0:23][0:360][0:575],TOTEXTTAU[0:23][0:360][0:575],TOTANGSTR[0:23][0:360][0:575],time,lat[0:360],lon[0:575] from M2T1NXAER.5.12.4
INFO:root:---- Begin Download ----
CRITICAL:root:% New File from Date 2023-04-22 Integrity Check Failed
INFO:root:---- Download Finished ----
INFO:root:---- Begin Analysing Directory ----
INFO:root:---- End Analysing Directory ----
INFO:root:Preparing new file MERRA2_101.const_2d_asm_Nx.00000000.nc4.nc4?PHIS[0:0][0:360][0:575],time,lat[0:360],lon[0:575] from M2C0NXASM.5.12.4
INFO:root:---- Begin Download ----
CRITICAL:root:% New File from Date 2023-04-22 Integrity Check Failed
INFO:root:---- Download Finished ----
INFO:root:---- Begin Merging In Daily Variables ----
INFO:root:* Processing Data In 2023-04-22...
INFO:root:% Merging Data For MERRA2_400.tavg1_2d_aer_Nx.20230422.nc4.nc
```
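The repeated `Integrity Check Failed` lines suggest the files written to the download directory are not valid NetCDF; one common cause with NASA Earthdata downloads is the server returning an HTML login or error page instead of the data. A minimal sketch (not part of irradpy; the directory name is an assumption, point it at the files named in the log) to check the file headers:

```python
# Minimal sketch: check whether downloaded files are real NetCDF/HDF5 or
# something else (e.g. an HTML error page from the server).
# "MERRA2_data" is an assumed directory name; adjust it to the actual download path.
from pathlib import Path

def looks_like_netcdf(path):
    with open(path, "rb") as f:
        header = f.read(8)
    # Classic NetCDF files start with b"CDF"; NetCDF-4/HDF5 files start with b"\x89HDF".
    return header.startswith(b"CDF") or header.startswith(b"\x89HDF")

for nc_file in Path("MERRA2_data").glob("*.nc*"):
    if not looks_like_netcdf(nc_file):
        print(f"Not valid NetCDF (possibly an HTML error page): {nc_file.name}")
```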

Error

```
ValueError                                Traceback (most recent call last)
Input In [8], in <cell line: 5>()
      5 if __name__ == "__main__":
      6     multiprocessing.freeze_support()
----> 7     irradpy.downloader.run(auth={"uid":"o_olalekan", "password":"###"})
      9 # More Examples
     10
     11 # Download All Data From 2018-01-01 To 2018-01-02
     12 irradpy.downloader.run(auth={"uid":"o_olalekan", "password":"###"},
     13                        initial_year=2018, final_year=2018,
     14                        initial_month=1, final_month=1,
    (...)
     19                        thread_num=20, connection_num=2
     20                        )

File ~-admin\Anaconda3\lib\site-packages\irradpy-1.5.0-py3.9.egg\irradpy\downloader\socket.py:122, in run(collection_names, initial_year, final_year, initial_month, final_month, initial_day, final_day, lat_1, lon_1, lat_2, lon_2, merra2_var_dicts, output_dir, auth, merge_timelapse, thread_num)
    120 # Call the main function
    121 socket = SocketManager()
--> 122 socket.daily_download_and_convert(
    123     collection_names, merra2_var_dicts=merra2_var_dicts,
    124     initial_year=initial_year, initial_month=initial_month, initial_day=initial_day,
    125     final_year=final_year, final_month=final_month, final_day=final_day,
    126     output_dir=output_dir,
    127     auth=auth,
    128     merge_timelapse=merge_timelapse,
    129     lat_1=lat_1, lon_1=lon_1,
    130     lat_2=lat_2, lon_2=lon_2,
    131     thread_num=thread_num,
    132 )

File ~-admin\Anaconda3\lib\site-packages\irradpy-1.5.0-py3.9.egg\irradpy\downloader\download.py:705, in SocketManager.daily_download_and_convert(self, collection_names, initial_year, final_year, initial_month, final_month, initial_day, final_day, lat_1, lon_1, lat_2, lon_2, merra2_var_dicts, output_dir, auth, merge_timelapse, merge, thread_num)
    703 logging.info("---- Begin Merging In Daily Variables ----")
    704 for date in self.iter_days(datetime.date(initial_year, initial_month, initial_day), datetime.date(final_year, final_month, final_day)):
--> 705     delete_set = self.merge_variables_perday(
    706         temp_dir_download,
    707         merge_collection_names,
    708         final_year,
    709         final_month,
    710         final_day,
    711         date,
    712     )
    713     logging.info("# Deleting Daily Redundant Files...")
    714     for name in delete_set:

File ~-admin\Anaconda3\lib\site-packages\irradpy-1.5.0-py3.9.egg\irradpy\downloader\download.py:391, in SocketManager.merge_variables_perday(self, path_data, collection_names, final_year, final_month, final_day, date)
    389 if var not in collections:
    390     collections.append(var)
--> 391 remote_ds = xr.open_dataset(name)
    392 # subset to desired variables and merge
    393 try:

File ~-admin\Anaconda3\lib\site-packages\xarray\backends\api.py:479, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, backend_kwargs, *args, **kwargs)
    476 kwargs.update(backend_kwargs)
    478 if engine is None:
--> 479     engine = plugins.guess_engine(filename_or_obj)
    481 backend = plugins.get_backend(engine)
    483 decoders = _resolve_decoders_kwargs(
    484     decode_cf,
    485     open_backend_dataset_parameters=backend.open_dataset_parameters,
    (...)
    491     decode_coords=decode_coords,
    492 )

File ~-admin\Anaconda3\lib\site-packages\xarray\backends\plugins.py:152, in guess_engine(store_spec)
    144 else:
    145     error_msg = (
    146         "found the following matches with the input file in xarray's IO "
    147         f"backends: {compatible_engines}. But their dependencies may not be installed, see:\n"
    148         "http://xarray.pydata.org/en/stable/user-guide/io.html \n"
    149         "http://xarray.pydata.org/en/stable/getting-started-guide/installing.html"
    150     )
--> 152 raise ValueError(error_msg)

ValueError: did not find a match in any of xarray's currently installed IO backends ['netcdf4', 'h5netcdf', 'scipy', 'pydap', 'zarr']. Consider explicitly selecting one of the installed engines via the engine parameter, or installing additional IO dependencies, see:
http://xarray.pydata.org/en/stable/getting-started-guide/installing.html
http://xarray.pydata.org/en/stable/user-guide/io.html
```
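The ValueError itself only says xarray could not guess an IO backend for the file being merged. A quick, hedged check (the file name is taken from the log above; the path is an assumption and should point at the actual download directory) can separate a missing `netcdf4` backend from a file that is simply not valid NetCDF, which would be consistent with the integrity-check failures:

```python
# Minimal sketch: try to open one of the files from the log with an explicit
# backend. The path is an assumption based on the file names in the log above.
import xarray as xr

path = "MERRA2_data/MERRA2_400.tavg1_2d_aer_Nx.20230422.nc4.nc"
try:
    ds = xr.open_dataset(path, engine="netcdf4")  # force the netcdf4 backend
    print(ds)
except Exception as exc:
    # If this still fails with the netcdf4 package installed, the file itself is
    # likely corrupted, consistent with the "Integrity Check Failed" messages.
    print(f"Could not open {path}: {exc}")
```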

jo-omoyele • Jun 21 '23 12:06

I am also getting the same ValueError. Did you find a fix for this @jo-omoyele?

simobakk • Jan 16 '24 14:01