Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

460 multiple results #486

Merged
merged 20 commits into from
Nov 30, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 6 additions & 2 deletions bifacial_radiance/HPCScripts/BasicSimulations/addNewModule.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,12 @@
import bifacial_radiance
import os

rad_obj = bifacial_radiance.RadianceObj('makemod', 'TEMP')
testfolder = 'TEMP'

rad_obj.getEPW(37.42, -110)
if not os.path.exists(testfolder):
os.makedirs(testfolder)

rad_obj = bifacial_radiance.RadianceObj('makemod', testfolder)

moduletype='tutorial-module'
x = 2
Expand Down
29 changes: 0 additions & 29 deletions bifacial_radiance/HPCScripts/BasicSimulations/dask_template.sbatch

This file was deleted.

51 changes: 51 additions & 0 deletions bifacial_radiance/HPCScripts/BasicSimulations/run_sbatch.sbatch
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#!/usr/bin/bash
# Slurm batch script: registers the simulation module, then brings up a
# two-node dask cluster (one scheduler + workers on every node) and runs
# the bifacial_radiance tracking/gendaylit task-submission script on it.
# NOTE(review): paths under /home/sayala and /scratch/sayala are
# user-specific — adjust before reuse on another account.

#SBATCH --job-name="demo_run"
#SBATCH --nodes=2
#SBATCH --ntasks-per-node=36
#SBATCH --time=15
#SBATCH --export=ALL
#SBATCH --account=pvsoiling
# --partition=debug
#SBATCH --mail-type=ALL
#SBATCH [email protected]

# Register/create the custom module definition before the cluster starts
# (presumably writes a module .json used by the simulation script — confirm).
python3 /home/sayala/BasicSimulations/addNewModule.py

#-----------------------
# Per-job working directory on scratch, named after the Slurm job id.
export BASE=/scratch/$USER
mkdir -p $BASE/$SLURM_JOB_ID
cd $BASE/$SLURM_JOB_ID
# Record starting time
date

# Save info: keep a copy of this script and the environment for provenance.
cat $0 > $SLURM_JOB_ID.script
printenv > $SLURM_JOB_ID.env
cp /home/sayala/BasicSimulations/simulate_tracking_gendaylit.py .

# Start up dask scheduler (backgrounded) on the InfiniBand interface; the
# scheduler file is how workers and the client find it.
dask-scheduler --interface ib0 \
--scheduler-file=/scratch/sayala/dask_testing/scheduler.json &

# Wait for scheduler to start
sleep 5

# Start up dask worker on all nodes (Note, script is used to also set
# environment variables on all the nodes. If these were set by default
# (using bash_profile for example), the commented command below could
# be used to start up workers.
srun /home/sayala/BasicSimulations/dask_on_node.sh &

# Results are written under RUNS/ inside the per-job scratch directory.
mkdir RUNS
cd RUNS

# Wait for workers to start
sleep 5

# Run script to submit tasks
python3 /home/sayala/BasicSimulations/simulate_tracking_gendaylit.py

# Record ending time
date
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def simulate_single(tilt=None, results_folder_fmt=None, weather_file=None):
sim_general_name = 'bifacial_example'
lat = 37.5
lon = -77.6
moduletype = 'Prism Solar Bi60 landscape'
moduletype = 'tutorial-module'
pitch = 3
clearance_height = 0.2
azimuth = 180
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def simulate_single(daydate=None, results_folder_fmt=None, weather_file=None):
sim_general_name = 'bifacial_example'
lat = 37.5
lon = -77.6
moduletype = 'Prism Solar Bi60 landscape'
moduletype = 'tutorial-module'
gcr = 0.35
hub_height = 0.2

Expand Down
6 changes: 3 additions & 3 deletions bifacial_radiance/load.py
Original file line number Diff line number Diff line change
Expand Up @@ -372,7 +372,7 @@ def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False
# So we get average hourly irradiance as well as Wh on
# results of power.
D2b = D2.copy()
D2b = D2b.groupby(pd.PeriodIndex(D2b.index, freq="H")).mean().reset_index()
D2b = D2b.groupby(pd.PeriodIndex(D2b.index, freq="H")).mean(numeric_only=True).reset_index()
D2b['BGG'] = D2b['Grear_mean']*100/D2b['Gfront_mean']
D2b['BGE'] = (D2b['Pout']-D2b['Pout_Gfront'])*100/D2b['Pout']
D2b['Mismatch'] = (D2b['Pout_raw']-D2b['Pout'])*100/D2b['Pout_raw']
Expand All @@ -388,7 +388,7 @@ def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False
D3['Mismatch'] = (D3['Pout_raw']-D3['Pout'])*100/D3['Pout_raw']
D3['rowWanted'] = rownum
D3['modWanted'] = modnum
D3m = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).mean().reset_index()
D3m = D2.groupby(pd.PeriodIndex(D2.index, freq="M")).mean(numeric_only=True).reset_index()
D3['temp_air'] = D3m['temp_air']
D3['wind_speed'] = D3m['wind_speed']
D3.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)
Expand All @@ -399,7 +399,7 @@ def _exportTrackerDict(trackerdict, savefile, reindex=False, monthlyyearly=False
D4['Mismatch'] = (D4['Pout_raw']-D4['Pout'])*100/D4['Pout_raw']
D4['rowWanted'] = rownum
D4['modWanted'] = modnum
D4m = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).mean().reset_index()
D4m = D2.groupby(pd.PeriodIndex(D2.index, freq="Y")).mean(numeric_only=True).reset_index()
D4['temp_air'] = D4m['temp_air']
D4['wind_speed'] = D4m['wind_speed']
D4.drop(columns=['theta', 'surf_tilt', 'surf_azm'], inplace=True)
Expand Down
68 changes: 54 additions & 14 deletions bifacial_radiance/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,12 @@
def _findme(lst, a): #find string match in a list. script from stackexchange
return [i for i, x in enumerate(lst) if x == a]

def _firstlist(l): #find first not-none value in a list. useful for checking multiple keys in dict
try:
return next(item for item in l if item is not None)
except StopIteration:
return None

def _missingKeyWarning(dictype, missingkey, newvalue): # prints warnings
if type(newvalue) is bool:
valueunit = ''
Expand Down Expand Up @@ -371,6 +377,12 @@ def __init__(self, name=None, path=None, hpc=False):
self._setPath(path)
# load files in the /materials/ directory
self.materialfiles = self.returnMaterialFiles('materials')

# store list of columns and methods for convenience / introspection
# TODO: abstract this by making a super class that this inherits
self.columns = [attr for attr in dir(self) if not (attr.startswith('_') or callable(getattr(self,attr)))]
self.methods = [attr for attr in dir(self) if (not attr.startswith('_') and callable(getattr(self,attr)))]


def _setPath(self, path):
"""
Expand Down Expand Up @@ -1053,13 +1065,22 @@ def _parseTimes(t, hour, coerce_year):
return t_out, coerce_year
# end _parseTimes

def _parseMetadataNSRDB(m):
    """Normalize an NSRDB metadata dict *m* in place to the key names
    bifacial_radiance expects, and return it.

    Sets:
        'altitude' : first non-None of 'altitude' / 'elevation'
        'TZ'       : first non-None of 'TZ' / 'Time Zone' / 'timezone'
        'Name'     : 'county' if present, else 'nsrdb_<Location ID>'
        'city'     : "county,state,country", or '-' if any part is
                     missing or None
    """
    m['altitude'] = _firstlist([m.get('altitude'), m.get('elevation')])
    m['TZ'] = _firstlist([m.get('TZ'), m.get('Time Zone'), m.get('timezone')])
    m['Name'] = _firstlist([m.get('county'), f"nsrdb_{m.get('Location ID')}"])

    try:
        m['city'] = (m['county'] + ',' + m['state'] +
                     ',' + m['country'])
    except (KeyError, TypeError):
        # KeyError: a component key is absent.
        # TypeError: a component exists but is None (NSRDB sometimes
        # returns null fields) — previously this escaped uncaught.
        m['city'] = '-'

    return m

metadata['TZ'] = metadata['timezone']
metadata['Name'] = metadata['county']
metadata['altitude'] = metadata['elevation']
metadata['city'] = (metadata['county'] + ',' + metadata['state'] +
',' + metadata['country'])
metadata = _parseMetadataNSRDB(metadata)

metdata.rename(columns={'dni': 'DNI',
'dhi': 'DHI',
Expand Down Expand Up @@ -2299,9 +2320,11 @@ def printModules(self):
print('Available module names: {}'.format([str(x) for x in modulenames]))
return modulenames


def addPiles(self, spacingPiles=6, pile_lenx=0.2, pile_leny=0.2, pile_height=None):
'''
Function to add support piles at determined intervals throughout the rows.
TODO: enable functionality or check for scenes using 'clearance_height' ?

Parameters
----------
Expand Down Expand Up @@ -2387,7 +2410,7 @@ def addPiles(self, spacingPiles=6, pile_lenx=0.2, pile_leny=0.2, pile_height=Non


return


def makeScene(self, module=None, sceneDict=None, radname=None,
moduletype=None, appendtoScene=None):
Expand Down Expand Up @@ -2827,7 +2850,8 @@ def analysis1axis(self, trackerdict=None, singleindex=None, accuracy='low',
name = '1axis_%s%s'%(index,customname)
octfile = trackerdict[index]['octfile']
scene = trackerdict[index]['scene']
trackerdict[index]['Results'] = []
if not trackerdict[index].get('Results'):
trackerdict[index]['Results'] = []
if octfile is None:
continue # don't run analysis if the octfile is none
# loop over rowWanted and modWanted. Need to listify it first
Expand All @@ -2837,6 +2861,7 @@ def analysis1axis(self, trackerdict=None, singleindex=None, accuracy='low',
row_mod_pairs = list(itertools.product(rowWanted,modWanted))
for (r,m) in row_mod_pairs:
Results = {'rowWanted':r,'modWanted':m}
if customname: Results['customname'] = customname
try: # look for missing data
analysis = AnalysisObj(octfile,name)
name = '1axis_%s%s'%(index,customname,)
Expand Down Expand Up @@ -3144,7 +3169,8 @@ class GroundObj:
-------

"""

def __repr__(self):
return str(self.__dict__)
def __init__(self, materialOrAlbedo=None, material_file=None, silent=False):
import warnings
from numbers import Number
Expand Down Expand Up @@ -3221,6 +3247,11 @@ def __init__(self, materialOrAlbedo=None, material_file=None, silent=False):
except IndexError as e:
print('albedo.shape should be 3 column (N x 3)')
raise e

# store list of columns and methods for convenience / introspection
# TODO: abstract this by making a super class that this inherits
self.columns = [attr for attr in dir(self) if not (attr.startswith('_') or callable(getattr(self,attr)))]
self.methods = [attr for attr in dir(self) if (not attr.startswith('_') and callable(getattr(self,attr)))]

def printGroundMaterials(self, materialString=None):
"""
Expand Down Expand Up @@ -3626,9 +3657,18 @@ class MetObj:
example, TMY3 data is right-labeled, so 11 AM data represents data from
10 to 11, and sun position should be calculated at 10:30 AM. Currently
SAM and PVSyst use left-labeled interval data and NSRDB uses centered.

Once initialized, the following parameters are available in the MetObj:
-latitude, longitude, elevation, timezone, city [scalar values]

-datetime, ghi, dhi, dni, albedo, dewpoint, pressure, temp_air,
wind_speed, meastracker_angle [numpy.array]

-solpos [pandas dataframe of solar position]

"""

def __repr__(self):
return str(self.__dict__)
def __init__(self, tmydata, metadata, label = 'right'):

import pytz
Expand All @@ -3645,6 +3685,7 @@ def __init__(self, tmydata, metadata, label = 'right'):
self.longitude = metadata['longitude']; lon=self.longitude
self.elevation = metadata['altitude']; elev=self.elevation
self.timezone = metadata['TZ']

try:
self.city = metadata['Name'] # readepw version
except KeyError:
Expand All @@ -3654,10 +3695,9 @@ def __init__(self, tmydata, metadata, label = 'right'):
self.ghi = np.array(tmydata.GHI)
self.dhi = np.array(tmydata.DHI)
self.dni = np.array(tmydata.DNI)
try:
self.albedo = np.array(tmydata.Alb)
except AttributeError: # no TMY albedo data
self.albedo = None
self.albedo = np.array(_firstlist([tmydata.get('Alb'), tmydata.get('albedo'),
tmydata.get('Albedo')]) )
if pd.isnull(self.albedo).all(): self.albedo = None

# Try and retrieve dewpoint and pressure
try:
Expand Down Expand Up @@ -3762,7 +3802,7 @@ def __init__(self, tmydata, metadata, label = 'right'):
self.solpos = pvlib.irradiance.solarposition.get_solarposition(sunup['corrected_timestamp'],lat,lon,elev)
self.sunrisesetdata=sunup
self.label = label

self.columns = [attr for attr in dir(self) if not attr.startswith('_')]

def _set1axis(self, azimuth=180, limit_angle=45, angledelta=None,
backtrack=True, gcr=1.0/3.0, cumulativesky=True,
Expand Down
1 change: 1 addition & 0 deletions docs/sphinx/source/manualapi.rst
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,7 @@ Weather

RadianceObj.getEPW
RadianceObj.readWeatherFile
RadianceObj.NSRDBWeatherData

Sky Dome
--------
Expand Down
11 changes: 6 additions & 5 deletions docs/sphinx/source/whatsnew/pending.rst
Original file line number Diff line number Diff line change
Expand Up @@ -7,15 +7,16 @@ Release of new version including ...

API Changes
~~~~~~~~~~~~
*A new function can now be called to compile results and report out final irradiance and performance data: :py:class:`~bifacial_radiance.RadianceObj.compileResults`.
* A new function can now be called to compile results and report out final irradiance and performance data: :py:class:`~bifacial_radiance.RadianceObj.compileResults`.
* Results generated with the above can be saved with the :py:class:`~bifacial_radiance.RadianceObj.exportTrackerDict`, which saves an Hourly, Monthly and Yearly .csvs in the results folder.
*Multiple modules and rows can now be selected in a single analysis scan. ``modWanted`` and ``rowWanted`` inputs in :py:class:`~bifacial_radiance.RadianceObj.analysis1axis` can now be a list, to select multiple rows and modules for scans. (:issue:`405`)(:pull:`408`)
*To support multiple modules and row scans for 1axis simulations, outputs like Wm2Front are now stored in ``trackerdict``.``Results`` (:issue:`405`)(:pull:`408`)
* ``mismatch.mad_fn`` has new functionality and input parameter `axis`. If a 2D matrix or dataframe is passed in as data, MAD is calculated along the row (default) or along the columns by passing 'axis=1'
* Multiple modules and rows can now be selected in a single analysis scan. ``modWanted`` and ``rowWanted`` inputs in :py:class:`~bifacial_radiance.RadianceObj.analysis1axis` can now be a list, to select multiple rows and modules for scans. (:issue:`405`)(:pull:`408`)
* To support multiple modules and row scans for 1axis simulations, outputs like Wm2Front are now stored in ``trackerdict``.``Results`` (:issue:`405`)(:pull:`408`)
* ``mismatch.mad_fn`` has new functionality and input parameter `axis`. If a 2D matrix or dataframe is passed in as data, MAD is calculated along the row (default) or along the columns by passing 'axis=1' (:issue:`449`)(:pull:`485`)
* NSRDB weather data can now be loaded using :py:class:`~bifacial_radiance.RadianceObj.NSRDBWeatherData`.

Enhancements
~~~~~~~~~~~~

* :py:class:`~bifacial_radiance.RadianceObj`, :py:class:`~bifacial_radiance.GroundObj`, and :py:class:`~bifacial_radiance.MetObj` now have `self.columns` and `self.methods` introspection to list the data columns and methods available.



Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@
# requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=[
'pandas',
'pandas >= 1.3.0',
'pvlib >= 0.8.0',
'pvmismatch',
'configparser',
Expand Down
1 change: 1 addition & 0 deletions tests/nsrdb_boulder_metadata.json
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
{"Source": "NSRDB", "Location ID": "149190", "City": "-", "State": "-", "Country": "-", "Time Zone": -7, "Local Time Zone": -7, "Dew Point Units": "c", "DHI Units": "w/m2", "DNI Units": "w/m2", "GHI Units": "w/m2", "Temperature Units": "c", "Pressure Units": "mbar", "Wind Direction Units": "Degrees", "Wind Speed Units": "m/s", "Surface Albedo Units": "N/A", "Version": "3.2.0", "latitude": 40.01, "longitude": -105.26, "altitude": 1636}
Loading
Loading