#! /usr/opt/bin/python #################################################################### # rt_graph_update.py # rt_www_rev='2.30' # # Feb 1999 # by dbriar #################################################################### # # Graph NWIS parameters whenever new data is # available for a station. If the parameter at a station is currectly # turned off, the program will only generate a graph if time # is 01:00 or if station numbers or keyword "all" is supplied # on the command line. If there is no data for last 7 days, # a graph will not be generated under any circumstances. # #################################################################### import time startsecs=time.time() # ------------------------------------------------------------------------ # -- Set RT_HOME and do system level househeeping # ------------------------------------------------------------------------ RT_HOME="/www/htdocs/rt" logsdir= RT_HOME+'/logs' datadir= RT_HOME+'/data' pubdir= RT_HOME+'/pub' # -- Import standard modules import sys,posix,DateTime,RDB,string from math import log10 # -- Alias os to posix because posix loads faster at Python 1.2 os = posix # -- Import rt modules sys.path.insert(1,RT_HOME) # Adds RT_HOME to python module search path from rt_config_defs import * os.umask(002) # ------------------------------------------------------------------------ # -- Check OS and set symlinks for compiled programs # ------------------------------------------------------------------------ sysname, nodename, release, version, machine = posix.uname() if sysname == 'dgux': posix.unlink(RT_HOME+'/lib/gdmodule.so') posix.symlink( RT_HOME+'/lib/gdmodule.so.dg', RT_HOME+'/lib/gdmodule.so') else: posix.unlink(RT_HOME+'/lib/gdmodule.so') posix.symlink( RT_HOME+'/lib/gdmodule.so.sun', RT_HOME+'/lib/gdmodule.so') import gd # ------------------------------------------------------------------------ # -- Set command aliases and rt defaults # 
# ------------------------------------------------------------------------
# -- Command aliases (module-level aliases of string/os functions; these
# -- avoid repeated attribute lookups on old interpreters)
os_system = os.system
DTDT = DateTime.DateTime
split = string.split
zfill = string.zfill
upper = string.upper
lower = string.lower
digits = string.digits
splitfields = string.splitfields
atof = string.atof
atoi = string.atoi
rjust = string.rjust
find = string.find
join = string.join
joinfields = string.joinfields
# ------------------------------------------------------------------------
# -- GD constants
# ------------------------------------------------------------------------
# -- Generate triangle gif (small blue marker used to plot daily statistics)
mrk=gd.image((9,10))
blue=mrk.colorAllocate((0,0,255))
tran=mrk.colorAllocate((254,254,254))
mrk.filledRectangle((0,0),(9,10),tran)
mrk.colorTransparent(tran)
mrk.filledPolygon(((0,9),(0,8),(4,0),(8,8),(8,9),(0,9)),blue)
#mrk.writeGif('mrk.gif')
# -- Create a 1x3 blue pixel brush (used to stroke the data trace)
brush=gd.image((1,3))
blueBrush = brush.colorAllocate((0,0,255))
brush.filledRectangle((0,0),(1,3), blueBrush)
# -- Create a 1x2 blue pixel brush (used for the dashed flood line)
floodbrush=gd.image((1,2))
floodbrush.filledRectangle((0,0),(1,2), blueBrush)
# -- Read USGS logo
logo=gd.image(RT_HOME+'/icons/water_banner4.gif')
# ======================================================================
# -- Functions
# ======================================================================
#
# -- Convert datetime to DT object and calc xpx
#
def make_dt_xpx(uvL, uvlen, startdate, output):
    """Convert record date strings in uvL to DateTime objects and compute
    the x pixel coordinate of each record.

    uvL       -- list of unit-value records; record[0] is a date string
    uvlen     -- number of records in uvL (recomputed internally)
    startdate -- DateTime of the left edge of the graph period
    output    -- callable taking one string; used for log/screen messages

    Returns (dt_xpx, uv_ranges, uvlen, uvL) where dt_xpx is the x pixel
    per record, uv_ranges is a list of (start,end) index pairs of
    continuous record, and uvL has been purged of bad/old records.
    Returns (ancestor placeholders, 0) form if nothing remains.
    """
    #tmp = time.time()
    DTDT = DateTime.DateTime
    # -- Purge data prior to start of graph
    # -- NOTE(review): bare except also discards records whose date string
    # -- fails to parse here; the IndexError catch ends the purge when the
    # -- list is exhausted.
    try:
        while 1:
            try:
                if DTDT(uvL[0][0]) < startdate:
                    del(uvL[0])
                else:
                    break
            except:
                del(uvL[0])
    except IndexError:
        pass
    # -- If all records preceed startdate, uvlen = 0, return
    uvlen=len(uvL)
    if not uvlen:
        return [''], [''], uvlen, ['']
    # -- Scan uvL and remove lines with invalid date strings
    bad_date_stringL=[]
    for i in range(uvlen):
        try:
            uvL[i][0]=DTDT(uvL[i][0])
        except:
            output('== skipping record with bad date string '+str(uvL[i][0])+'\n')
            bad_date_stringL.append(i)
    if bad_date_stringL:
        # -- Delete from the end so earlier indices stay valid
        bad_date_stringL.reverse()
        for i in bad_date_stringL:
            del uvL[i]
    L_int = int                  # -- local alias
    dt_xpx = ['']*uvlen          # -- datetime x-coord for unit value
    uv_ranges = []               # -- list of tuples of begin/end of continuous record
    rng_start = 0                # -- holder for start of present range
    last_dt = DTDT(uvL[0][0])    # -- first record for missing record check
    # -- NOTE(review): if every record had a bad date string, uvL is now
    # -- empty and uvL[0] above raises IndexError -- confirm callers
    # -- guarantee at least one good record.
    uvlen = len(uvL)
    for i in range(uvlen):
        new_dt = uvL[i][0]
        dt_xpx[i] = 70+L_int((new_dt-startdate)*60)   # -- 60 => px per day
        # -- If more than 1 hour gap in record, store new range segment
        # -- (0.042 is ~1/24, i.e. one hour expressed as a fraction of a
        # -- day -- presumably DateTime subtraction yields day units; the
        # -- "px per day" comment above is consistent with that)
        if new_dt - last_dt > 0.042:
            uv_ranges.append((rng_start,i-1))
            rng_start = i
            print_dt = str(last_dt)
            # -- Pad short date strings so the log message always shows hh:mm
            if len(str(last_dt)) == 13:
                print_dt=print_dt+':00'
            elif len(str(last_dt)) == 10:
                print_dt=print_dt+' 00:00'
            output('== Missing record beginning '+print_dt+' at recno '+str(i)+'\n')
        last_dt=new_dt
    # -- Append last range segment to uv_ranges
    uv_ranges.append((rng_start,i))
    #output('  dt_xpx time = '+str(round(time.time()-tmp,4))+'\n')
    return dt_xpx, uv_ranges, uvlen, uvL
#
# -- Make float parameter values
#
def makefloat(uvL, uvlen, ic_parm, output):
    """Convert the parameter column ic_parm of every record to float.

    Non-numeric values are diverted: multi-character values are treated as
    table comments (e.g. "Ice") and single characters as ADAPS threshold
    flags (e.g. "H"); both are keyed by the integer day of the record.
    Null values are replaced by the string 'N' so the gensegs loops raise
    and break the line segment there.

    Returns (uvL, parmL, DT_cmtD, DT_errD) where parmL is the list of all
    float values found (used for max/min scans).
    """
    #tmp=time.time()
    parmL = []
    DT_cmtD = {}    # -- Dictionary mapping date to tbl_comment (ex. "Ice")
    DT_errD = {}    # -- Dictionary mapping date to ADAPS threshold flag (ex. "H")
    atof = string.atof
    for i in range(uvlen):
        val = uvL[i][ic_parm]
        # -- If not null, make float, append parmL, and
        # -- assign float back into uvL
        if val:
            try:
                val = atof(val)
                uvL[i][ic_parm] = val
                parmL.append(val)
            except ValueError:
                # -- If length gt 1, must be comment (Ice etc)
                # -- key = DateTime-(1 minute) to avoid today at 24:00 plotting as tomorrow;
                # -- int() truncates the DateTime to a whole day
                if len(val) > 1:
                    DT_cmtD[ int( uvL[i][0] - 0.0007 ) ] = val
                # -- Else must be ADAPS threshold flag
                else:
                    DT_errD[ int( uvL[i][0] - 0.0007 ) ] = val
        else:
            # -- Assign 'N' to force exception in gensegs loops
            uvL[i][ic_parm]='N'
    #output('  makefloat time = '+str(round(time.time() - tmp,4))+'\n')
    return uvL, parmL, DT_cmtD, DT_errD
#
# -- Build segments list for arithmetic graphs
#
def gensegs_arith(uvL, dt_xpx, y_max, y_min, uv_ranges, icol, output):
    """Build a list of polyline segments (lists of (x,y) pixel tuples) for
    an arithmetic-scale graph.

    A new segment starts whenever a value is the 'N' placeholder (the
    subtraction then raises TypeError) so gaps are not drawn through.
    Uses module globals ypx_diff and ypx_axis_bot set by graph_update_main.
    """
    #start = time.time()
    segs = []
    y_diff = y_max-y_min
    L_int = int
    factor = float(ypx_diff)/y_diff    # -- pixels per data unit
    # -- For each range of continuous record in uvfile
    for rng_start, rng_end in uv_ranges:
        tmp=[]
        tmp_append = tmp.append
        # -- Step through each range
        for i in range(rng_start,rng_end+1):
            # -- If val not null append x and y pixels
            try:
                ypx = ypx_axis_bot-L_int((uvL[i][icol]-y_min)*factor)
                tmp_append((dt_xpx[i],ypx))
            except TypeError:
                # -- Hit an 'N' placeholder: close out the current segment
                if tmp:
                    segs.append(tmp)
                    tmp=[]
                    tmp_append=tmp.append
        if tmp:
            if len(tmp) == 1:
                # -- Single point, add 2 pixel line
                xpx, ypx = tmp[0]
                tmp.append((xpx+2,ypx))
            segs.append(tmp)
    #output('  gensegs_arith time = '+str(round(time.time()-start,4))+'\n')
    return segs
#
# -- Build segments list for log graphs
#
def gensegs_log(uvL, dt_xpx, y_max, y_min, uv_ranges, icol, output):
    """Build polyline segments for a log-scale graph.

    Same contract as gensegs_arith but the y pixel is computed from the
    log10 of the value.  Assumes y_min > 0 and all plotted values > 0
    (log10 would raise otherwise) -- TODO confirm callers guarantee this.
    """
    #start=time.time()
    skip = 1                 # -- NOTE(review): appears unused
    segs = []
    L_int = int
    L_log10 = log10
    y_min_log = L_log10(y_min)
    y_diff_log = L_log10(y_max)-L_log10(y_min)
    factor = float(ypx_diff)/y_diff_log    # -- pixels per decade fraction
    # -- For each range of continuous record in uvfile
    for rng_start, rng_end in uv_ranges:
        tmp=[]
        tmp_append = tmp.append
        # -- Step through each range
        for i in range(rng_start,rng_end+1):
            # -- If val not null (ie = 'N') append x and y pixels
            try:
                ypx=ypx_axis_bot-L_int((L_log10(uvL[i][icol])-y_min_log)*factor)
                tmp_append((dt_xpx[i],ypx))
            except TypeError:
                # -- Hit an 'N' placeholder: close out the current segment
                if tmp:
                    segs.append(tmp)
                    tmp=[]
                    tmp_append=tmp.append
        if tmp:
            if len(tmp) == 1:
                # -- Single point, add 2 pixel line
                xpx, ypx = tmp[0]
                tmp.append((xpx+2,ypx))
            segs.append(tmp)
    #output('  gensegs_log time = '+str(round(time.time()-start,4))+'\n')
    return segs
#
# -- Determine y-axis max and min for log axes
#
def y_maxmin_log(parm, p_max, p_min, output):
    """Choose rounded y-axis limits for a log-scale graph from the data
    max/min, write a max/min summary via output(), and return
    (y_max, y_min).

    NOTE(review): if p_min < .01 none of the branches below assigns y_min,
    so str(round(y_min,2)) raises NameError -- presumably data below 0.01
    never reaches a log graph; confirm.
    """
    if p_max >= 1 :
        # -- Round max up to the next multiple of its order of magnitude
        p_int = int(p_max)
        omag_max = pow(10, len(str(p_int)) - 1)
        num = int(p_max / omag_max)
        y_max = (num + 1) * int(omag_max)
    else:
        num = int(p_max * 10)
        y_max = (num + 1) / 10.0
    if p_min >= 1 :
        p_int = int(p_min)
        omag_min = pow(10, len(str(p_int)) - 1)
        num = int((p_min + (omag_min * 0.7)) / omag_min) - 1
        # -- If even order of magnitude
        if num == 0:
            if omag_min == 1:
                y_min = .5
            else:
                num = 5
                omag_min = omag_min/10
                y_min=num * int(omag_min)
        else:
            y_min=num * int(omag_min)
    elif 1 > p_min > .5:
        y_min= .5
    elif .5 >= p_min > .1:
        y_min= .1
    elif .1 >= p_min > .05:
        y_min= .05
    elif .05 >= p_min >= .01:
        y_min= .01
    # -- Force to decimal then string
    y_max_str = str(round(y_max,2))
    p_max_str = str(round(p_max,2))
    p_min_str = str(round(p_min,2))
    y_min_str = str(round(y_min,2))
    # -- Loop thru to get number of decimals (0-2)
    integer = 1     # true
    dec_places = 0
    for strval in y_max_str, p_max_str, p_min_str, y_min_str:
        dec_pos = find(strval,'.')
        dec_str = strval[dec_pos+1:]
        if len(dec_str) == 2:
            integer=0
            dec_places=2
            break
        elif not dec_str == '0':
            integer=0
            dec_places=1
    # -- Force to same number of decimals
    if integer:
        y_max_str = y_max_str[:find(y_max_str,'.')]
        p_max_str = p_max_str[:find(p_max_str,'.')]
        p_min_str = p_min_str[:find(p_min_str,'.')]
        y_min_str = y_min_str[:find(y_min_str,'.')]
    elif dec_places == 2:
        if y_max_str[-2] == '.': y_max_str = y_max_str+'0'
        if p_max_str[-2] == '.': p_max_str = p_max_str+'0'
        if p_min_str[-2] == '.': p_min_str = p_min_str+'0'
        if y_min_str[-2] == '.': y_min_str = y_min_str+'0'
    parm_name = ' '
    if parm == 'p00060':
        parm_name = 'Discharge '
    add_cols=0
    if dec_places > 0:
        add_cols=dec_places+1
    output('== '+parm_name+' -- y_max '+string.rjust(y_max_str,6+add_cols)+'\n')
    output('== '+parm+'_max '+string.rjust(p_max_str,6+add_cols)+'\n')
    output('== '+parm+'_min '+string.rjust(p_min_str,6+add_cols)+'\n')
    output( '== -- y_min '+string.rjust(y_min_str,6+add_cols)+'\n')
    return y_max, y_min
#
# -- Determine y-axis max and min for arithmetic axes
#
def y_maxmin_arith(parm, p_max, p_min, output):
    """Choose rounded y-axis limits and tic step for an arithmetic-scale
    graph from the data max/min, write a max/min summary via output(), and
    return (y_max, y_min, y_step).

    parm is the NWIS parameter code (p00065 stage, p00060 discharge,
    p00045 rainfall); rainfall gets a forced zero minimum and a tighter max.
    """
    parm_diff = p_max - p_min
    y_step = 1
    # -- Pick a tic step appropriate to the data range
    if parm_diff >= 100:
        parm_diff_int = int(parm_diff)
        omag = pow(10, len(str(parm_diff_int)) - 1)
        y_step = omag
    elif parm_diff >= 50:
        y_step = 10
    elif 10 <= parm_diff < 50:
        y_step = 5
    elif 5 <= parm_diff < 10:
        y_step = 2
    elif 1 <= parm_diff < 5:
        y_step = 1
    elif parm_diff < 1:
        y_step = .1
    # -- Special condition for rainfall
    if parm == 'p00045' and parm_diff < .5:
        y_step=.05
    # -- Round limits outward by one (or two, fractional) steps
    if y_step < 1:
        y_max = round(round(p_max/y_step,1)*y_step + (y_step*2),1)
        y_min = round(round(p_min/y_step,1)*y_step - (y_step*2),1)
    else:
        y_max = int(round(p_max/y_step,0)*y_step + y_step)
        y_min = int(round(p_min/y_step,0)*y_step - y_step)
    # -- Special condition for rainfall--set min to 0 and
    # -- adjust max
    if parm == 'p00045':
        y_min = 0
        while y_max > 0.10:
            if y_max > p_max + (1.2 * y_step):
                y_max = y_max - y_step
            else:
                break
    # -- Format strings for screen output of max-min info
    y_max_str = str(round(y_max,2))
    if y_max_str[-2] == '.': y_max_str = y_max_str+'0'
    p_max_str = str(round(p_max,2))
    if p_max_str[-2] == '.': p_max_str = p_max_str+'0'
    p_min_str = str(round(p_min,2))
    if p_min_str[-2] == '.': p_min_str = p_min_str+'0'
    y_min_str = str(round(y_min,2))
    if y_min_str[-2] == '.': y_min_str = y_min_str+'0'
    parm_name = ' '
    if parm == 'p00065':
        parm_name = 'Stage '
    elif parm == 'p00060':
        parm_name = 'Discharge '
    elif parm == 'p00045':
        parm_name = 'Rainfall '
    output('== '+parm_name+' -- y_max '+rjust(y_max_str,10)+'\n')
    output('== '+parm+'_max '+rjust(p_max_str,10)+'\n')
    output('== '+parm+'_min '+rjust(p_min_str,10)+'\n')
    output( '== -- y_min '+rjust(y_min_str,10)+'\n')
    return y_max, y_min, y_step
# ----------------------------------------------------------------------
# -- FUNCTION -- gen_gif
#
---------------------------------------------------------------------- def gen_gif(stn, name, parm, segments, type, y_max, y_min, y_step=0, \ ypx_flood=0, flood_text, stats, stats_type, min_period, modayhrmin, \ output, format, DT_cmtD, DT_errD): #dstart=time.time() # -- Function to draw big text horizontally def bigtext(img, font, t_xpx, t_ypx, text, color): for char in text: img.string(font, (t_xpx-1,t_ypx-1), char, color) img.string(font, (t_xpx ,t_ypx-1), char, color) img.string(font, (t_xpx ,t_ypx ), char, color) img.string(font, (t_xpx-1,t_ypx ), char, color) t_xpx=t_xpx+10 # -- Create blank gif img=gd.image((xpx_max,ypx_gif)) # -- Local aliases img_line = img.line img_lines = img.lines img_string = img.string img_stringUp = img.stringUp gd_fontstrsize = gd.fontstrsize gd_font = gd.gdFontMediumBold # -- Define colors true_black = img.colorAllocate((0,0,0)) img.colorTransparent(true_black) black = img.colorAllocate((0,0,1)) white = img.colorAllocate((255,255,255)) grey = img.colorAllocate((175,175,175)) ltgrey= img.colorAllocate((210,210,210)) # -- If presentation if format == 'presentation': # -- Fill background white img.filledRectangle((1,1),(xpx_max-2,ypx_gif-2),white) # -- Add banner logo.copyTo(img, (5,5),(0,0)) # -- Write title name=str(stn)+' '+name xpx_size = len(name)*10 ypx_size = 10 xpx_title = (xpx_max / 2)-(xpx_size / 2) t_xpx = xpx_title t_ypx = ypx_axis_top-29 font = gd.gdFontLarge bigtext(img, font, t_xpx, t_ypx, name, black) # -- Add coop credit try: rfo = open(RT_HOME+'/data/coop/'+stn+'.coop') text = rfo.read() # -- Parse out html tags if present while find(text, '<') >= 0: starttag = find(text, '<') endtag = find(text, '>') if starttag >= 0: text = text[:starttag]+text[endtag+1:] words = split(text) rfo.close() line = '' t_ypx = 37 t_xpx = 70 maxline = 30 if len(text) > 150: t_xpx = 30 maxline = 35 if len(text) > 200: t_xpx=15 maxline = 40 for i in range(len(words)): if len(line) + len(words[i]) < maxline: line = line+words[i]+' ' else: 
img.string(gd_font, (t_xpx,ypx_axis_bot+t_ypx), line, black) line = words[i]+' ' t_ypx = t_ypx+12 if line: img.string(gd_font, (t_xpx,ypx_axis_bot+t_ypx), line, black) except: pass # -- Add provisional statement red = img.colorAllocate((255,0,0)) text='Provisional Data Subject To Revision' bigtext(img, gd.gdFontGiant, (xpx_max/2)-180, ypx_gif-25, text, red) # -- Set long date string from station data date-time string in cache data_mon=atoi(modayhrmin[0:2]) data_day=atoi(modayhrmin[2:4]) data_hr =atoi(modayhrmin[4:6]) data_min=atoi(modayhrmin[6:8]) # -- If month of last data date > present month, assume year boundary if data_mon > DateTime.now().month(): data_yr=DateTime.now().year() -1 else: data_yr=DateTime.now().year() tyr,tmo,tdy,thr,tmn,tsc,txx,tyy,dst=time.localtime(time.time()) data_dt=time.ctime(time.mktime((data_yr,data_mon,data_day,data_hr,data_min,0,0,0,dst))) data_dt=data_dt[:16]+data_dt[-5:] # -- Draw day grid lines img_line((130, ypx_axis_top), (130,ypx_axis_bot),ltgrey) img_line((190, ypx_axis_top), (190,ypx_axis_bot),ltgrey) img_line((250, ypx_axis_top), (250,ypx_axis_bot),ltgrey) img_line((310, ypx_axis_top), (310,ypx_axis_bot),ltgrey) img_line((370, ypx_axis_top), (370,ypx_axis_bot),ltgrey) img_line((430, ypx_axis_top), (430,ypx_axis_bot),ltgrey) img_line((490, ypx_axis_top), (490,ypx_axis_bot),ltgrey) # -- Draw graph frame img_lines(( ( 70, ypx_axis_top), (550, ypx_axis_top), (550,ypx_axis_bot), ( 70,ypx_axis_bot), ( 70, ypx_axis_top) ), grey) # ----------------------------------------------------------- # -- Label x-axis with graph period dates # ----------------------------------------------------------- # -- Map month number to short and long month names mo_name_long={1:"January",2:"February", 3:"March", 4:"April", 5:"May", 6:"Jun", \ 7:"July", 8:"August", 9:"September", 10:"October", 11:"November", \ 12:"December"} # -- Get DateTime values for graph period today=int(DateTime.now()) today=DateTime.DateTime(today) graph_dates=[] for i in 
(-6,-5,-4,-3,-2,-1,0,1): graph_dates.append(today+i) # - Init xpx for day labels list xpx_lab=[100,160,220,280,340,400,460,520] # -- Write ADAPS threshold flags if present if DT_errD.keys() and format <> 'presentation': keysL=DT_errD.keys() keysL.sort() for day in keysL: try: day_index = graph_dates.index(day) xpx_center = xpx_lab[day_index] data_off_label = 'v' # -- Plot data-off label( xpx_size,ypx_size=gd_fontstrsize(gd_font,data_off_label) img_string(gd_font, (xpx_center-(xpx_size/2),ypx_axis_bot-255), data_off_label, black) img_string(gd_font, (xpx_center-(xpx_size/2),ypx_axis_bot-259), '.', black) except: pass # -- Populate dy_labL[], mo_labD{}, and yr_labD{} # - init month-label dict and year_count dict yr_moD={} cal_yearD={} for i in range(8): # -- Plot days day=str(graph_dates[i].day()) xpx_size,ypx_size=gd_fontstrsize(gd_font,day) img_string(gd_font, (xpx_lab[i]-(xpx_size/2),ypx_axis_bot+3), day, black) # -- Accumulate number of calander years cal_yearD[graph_dates[i].year()]=1 # -- Assign/append xpx as value to yr_moD at key (year,month) year_month=(graph_dates[i].year(), graph_dates[i].month()) if yr_moD.has_key(year_month): yr_moD[year_month].append(xpx_lab[i]) else: yr_moD[year_month]=[xpx_lab[i]] num_cal_years=len(cal_yearD.keys()) # -- Plot month label(s) yr_mo_keysL=yr_moD.keys() yr_mo_keysL.append((9999,0)) yr_mo_keysL.sort() mo_lab_xpxL=[] for year, month in yr_mo_keysL: if month: # -- Get month name text month_name=mo_name_long[month] if num_cal_years == 2 and len(yr_moD[(year,month)]) == 1: month_name=month_name[:3] # -- If single month or multiple years, append year to month string if len(yr_mo_keysL) == 2 or num_cal_years > 1 : month_label=month_name+' '+str(year) else: month_label=month_name # -- Get mean of day locations sum=0 for val in yr_moD[(year,month)]: sum=sum+val mo_lab_xpx=sum/len(yr_moD[(year,month)]) mo_lab_xpxL.append(mo_lab_xpx) # -- Plot month label(s) xpx_size,ypx_size=gd_fontstrsize(gd_font,month_label) img_string(gd_font, 
(mo_lab_xpx-(xpx_size/2),ypx_axis_bot+15), month_label, black) #bigtext(img, gd.gdFontGiant, mo_lab_xpx-(xpx_size/2), ypx_axis_bot+15, month_label, black) # -- If single calendar year but 2 months, place year label between months if num_cal_years < 2 and len(mo_lab_xpxL) == 2: yr='--- '+str(yr_mo_keysL[0][0])+' ---' yr_xpx = (mo_lab_xpxL[0]+mo_lab_xpxL[1])/2 xpx_size,ypx_size = gd_fontstrsize(gd_font,yr) img_string(gd_font, (yr_xpx-(xpx_size/2),ypx_axis_bot+15), yr, black) # ------------------------------------------------------------ # -- Define y-axis tics and labels: draw tics and grid lines # -- ytics[] data values at each mag/10 step # -- ypx_ytics[] y pixel values for ytics # -- ylab_tpls[] y label tulples (ypx,value) # -- y_step step between tics (variable) # ------------------------------------------------------------ if type == 'log': minor_tics = 2 y_diff = y_max-y_min ytics = [] ylab_tpl = [] if y_max >= 1: omag_max = round(pow(10,int(log10(y_max))),0) elif .1 <= y_max < 1: omag_max=.1 elif .01 <= y_max < .1: omag_max=.01 if y_min >= 1: omag_min = round(pow(10,int(log10(y_min))),0) elif .1 <= y_min < 1: omag_min=.1 elif .01 <= y_min < .1: omag_min=.01 y_step = omag_min # -- Set step and minor_tics if single order of magnitude if omag_min == omag_max or omag_max == y_max: if int(y_diff / omag_min) == 1 : y_step=omag_min / 10 elif int(y_diff / omag_min) == 2 : y_step=omag_min / 5 elif int(y_diff / omag_min) == 3 : y_step=omag_min / 2 minor_tics = 5 elif int(y_diff / omag_min) < 6 : minor_tics=5 else: y_step = omag_min # -- Determine major tics y_step=round(y_step,2) # force to 2 digits tic=round(float(y_min),2) # force to 2 digits omag_cur=round(omag_min,2) chg_omag_vals=[] # store changes in magnitude ytics.append(round(float(y_min),2)) while 1: tic=tic+y_step # -- If omag changes, change y_step if tic + y_step > omag_cur * 10: omag_cur=omag_cur*10 chg_omag_vals.append(round(omag_cur,2)) if y_max/omag_cur < 3: y_step=omag_cur/5 elif y_max/omag_cur < 5: 
y_step=omag_cur/2 else: y_step=omag_cur # -- Append tic value if less that y_max if tic < y_max: ytics.append(tic) else: break ytics.append(round(float(y_max),2)) # -- Get ypx for tics ypx_tics =[] y_min_log =log10(y_min) y_diff_log =log10(y_max)-log10(y_min) for i in ytics: ypx_tics.append(ypx_axis_bot-int((((log10(float(i))-y_min_log)/y_diff_log)*ypx_diff))) # -- Define and locate y labels # -- Force labels at omag changes ypx_chg_omag1=0 ypx_chg_omag2=0 if len(chg_omag_vals) >= 1: val=chg_omag_vals[0] ypx_chg_omag1=ypx_axis_bot-int((((log10(float(val))-y_min_log)/y_diff_log)*ypx_diff)) ylab_tpl.append((ypx_chg_omag1,val)) if len(chg_omag_vals) == 2: val=chg_omag_vals[1] ypx_chg_omag2=ypx_axis_bot-int((((log10(float(val))-y_min_log)/y_diff_log)*ypx_diff)) ylab_tpl.append((ypx_chg_omag2,val)) # -- Append (ypx,y_min) to label list; y_min == ytics[0] ylab_tpl.append((ypx_tics[0],ytics[0])) # -- Check spacing for rest of labels ypx_last=ypx_tics[0] for i in range(1,len(ypx_tics)-1): ypx_cur=ypx_tics[i] if ypx_last - ypx_cur > 15 and \ abs(ypx_cur - ypx_chg_omag1) > 15 and \ abs(ypx_cur - ypx_chg_omag2) > 15: ylab_tpl.append((ypx_cur,ytics[i])) ypx_last=ypx_cur # -- Append (ypx,y_max) to label list; y_max == tics[-1] ylab_tpl.append((ypx_tics[-1],ytics[-1])) # -- Draw y-axis grid lines including minor tics in ltgrey for i in range(1,len(ypx_tics)): incval=(ytics[i]-ytics[i-1])/minor_tics for j in range(minor_tics-1): midval=ytics[i-1]+incval*(j+1) ypx_mid_val=ypx_axis_bot-int((((log10(midval)-y_min_log)/y_diff_log)*ypx_diff)) img_line((70,ypx_mid_val),(550,ypx_mid_val),ltgrey) img_line((70,ypx_tics[i]),(550,ypx_tics[i]),grey) # -- Type arith else: y_diff = y_max-y_min ypx_tics = [] ylab_tpl = [] ytics = [round(y_min,2)] tic = round(y_min,2) y_step = round(y_step,2) while 1: tic=tic+y_step if tic <= y_max+(y_diff/100): ytics.append(tic) else: break for i in ytics: ypx=ypx_axis_bot-int((((float(i)-y_min)/y_diff)*250)) ylab_tpl.append((ypx,i)) ypx_tics.append(ypx) # -- 
Draw y-axis grid and labels len_ypx_tics = len(ypx_tics) ypx_half_step= (ypx_diff/2)/(len_ypx_tics-1) # -- Add grid lines at tics; add half line if < 6 labels for i in range(1,len_ypx_tics): if len_ypx_tics < 7: img_line((70,ypx_tics[i]+ypx_half_step),(550,ypx_tics[i]+ypx_half_step),grey) img_line((70,ypx_tics[i]),(550,ypx_tics[i]),grey) # ------------------------------------------ # -- Draw y-axis labels (both types of axes) # ------------------------------------------ # -- Loop thru all ylabels to get number of decimals (0-2) integer = 1 # true dec_places = 0 thousands = 0 # false ylab_tpl.sort() ylab_tpl.reverse() for ypx, ylab in ylab_tpl: ylabstr = str(ylab) dec_pos = find(ylabstr,'.') dec_str = ylabstr[dec_pos+1:] if len(dec_str) == 2: integer=0 dec_places=2 elif not dec_str == '0': integer=0 # -- Determine longest label to format for thousands if necessary ypx_ylab, ylab = ylab_tpl[-1] if integer: ylab=str(int(ylab)) if len(ylab) > 5: thousands=1 else: ylab=str(round(ylab,2)) dec_pos = find(ylab,'.') if dec_pos > 0: ylab=ylab[:dec_pos] if len(ylab) > 5: thousands=1 # -- For each label for ypx_ylab, ylab in ylab_tpl: # -- Int or round and force to string if integer: ylab=str(int(ylab)) else: ylab=str(round(ylab,2)) # -- Append second decimal if necessary if dec_places == 2 and ylab[-2] == '.': ylab=ylab+'0' # -- Catch minus sign minus='' if ylab[0] == '-': minus='-' ylab=ylab[1:] # -- Format for thousands comma if necessary dec_pos = -1 # init dec_append = '' # init if thousands: if len(ylab) > 3: ylab=ylab[:-3] else: ylab='.'+ylab[0] else: dec_pos = find(ylab,'.') if dec_pos > 0: dec_append=ylab[dec_pos:] ylab=ylab[:dec_pos] if len(ylab) > 3: ylab=ylab[:-3]+','+ylab[-3:] label=minus+ylab+dec_append # -- Store last label as max for placing y_axis explanation max_ylab=label # -- Write to img xpx_size,ypx_size=gd_fontstrsize(gd_font,label) img_string(gd_font, (66-xpx_size,ypx_ylab-(ypx_size/2)), label, black) #output(' GD first half time = 
'+str(round(time.time()-gdstart,4))+'\n') # -- Write new data img.setBrush(brush) for seg in segments: img_lines((seg), gd.gdBrushed) # -- Place data-off labels if present if DT_cmtD.keys(): keysL=DT_cmtD.keys() keysL.sort() for day in keysL: if day in graph_dates: day_index = graph_dates.index(day) xpx_center = xpx_lab[day_index] data_off_label = '<-'+DT_cmtD[day]+'->' # -- Plot data-off label( xpx_size,ypx_size=gd_fontstrsize(gd_font,data_off_label) img_string(gd_font, (xpx_center-(xpx_size/2),ypx_axis_bot-125), data_off_label, black) # -- Write stats for i in range(len(stats)-1): if stats[i]: ypx_stat=stats[i]-5 xpx_stat=xpx_lab[i]-4 mrk.copyTo(img,(xpx_stat,ypx_stat),(0,0),(9,10)) # -- Write flood line if ypx_tics[-1] < ypx_flood < ypx_tics[0]: img.setBrush(floodbrush) img.setStyle((0,0,0,0,1,1,1,1)) img_line((70, ypx_flood), (550, ypx_flood), gd.gdStyledBrushed) img_string(gd_font, (80,ypx_flood-15), flood_text, black) # -- Y-axis title if parm == 'p00065': short_parm='s' xpx_size,ypx_size=gd_fontstrsize(gd_font,max_ylab) xpx_tmp=((65-ypx_size)-xpx_size)/2 img_stringUp(gd_font, (xpx_tmp,ypx_axis_bot-30), "STAGE, IN FEET ABOVE DATUM", black) elif parm == 'p00060': short_parm='q' xpx_size,ypx_size=gd_fontstrsize(gd_font,max_ylab) xpx_tmp=((61-ypx_size)-xpx_size)/2 if thousands: img_stringUp(gd_font, (xpx_tmp,ypx_axis_bot), "STREAMFLOW, IN THOUSANDS OF FT3 / SEC", black) else: img_stringUp(gd_font, (xpx_tmp,ypx_axis_bot), "STREAMFLOW, IN CUBIC FEET PER SECOND", black) elif parm == 'p00045': short_parm='r' xpx_size,ypx_size=gd_fontstrsize(gd_font,max_ylab) xpx_tmp=((61-ypx_size)-xpx_size)/2 img_stringUp(gd_font, (xpx_tmp,ypx_axis_bot-20), "INCREMENTAL RAINFALL, IN INCHES", black) # -- Write explanation # -- Streamflow if parm == 'p00060': if format == 'presentation': # -- Explanation box # -- Explanation box ul = (310, ypx_axis_bot+ 38) ur = (550, ypx_axis_bot+ 38) lr = (550, ypx_axis_bot+ 108) ll = (310, ypx_axis_bot+ 108) img_lines((ul, ur, lr, ll, ul), grey) # 
-- Time string img_string(gd_font, (317,ypx_axis_bot+ 43), data_dt , black) # -- Line and STREAMFLOW text img.setBrush(brush) img_line((317,ypx_axis_bot+67),(340,ypx_axis_bot+67), gd.gdBrushed) img_string(gd_font, (350,ypx_axis_bot+60), "STREAMFLOW", black) # -- Statistic mrk.copyTo(img,(324,ypx_axis_bot+77),(0,0),(9,10)) img_string(gd_font, (350,ypx_axis_bot+77), stats_type+" DAILY STREAMFLOW,", black) if min_period > 1: img_string(gd_font, (354,ypx_axis_bot+89), 'based on '+str(min_period)+' years of record' , black) else: img_string(gd_font, (354,ypx_axis_bot+89), ' -- unavailable --' , black) else: img.setBrush(brush) img_line((80,ypx_axis_bot+40),(120,ypx_axis_bot+40), gd.gdBrushed) img_string(gd_font, (130,ypx_axis_bot+33), "STREAMFLOW", black) # -- Statistic mrk.copyTo(img,(307,ypx_axis_bot+33),(0,0),(9,10)) img_string(gd_font, (325,ypx_axis_bot+33), stats_type+" DAILY STREAMFLOW,", black) if min_period > 1: img_string(gd_font, (335,ypx_axis_bot+45), 'based on '+str(min_period)+' years of record' , black) else: img_string(gd_font, (335,ypx_axis_bot+45), ' -- unavailable --' , black) elif parm == 'p00065' or parm == 'p00045': if format == 'presentation': # -- Explanation box ul = (310, ypx_axis_bot+ 38) ur = (550, ypx_axis_bot+ 38) lr = (550, ypx_axis_bot+ 88) ll = (310, ypx_axis_bot+ 88) img_lines((ul, ur, lr, ll, ul), black) # -- Time string img_string(gd_font, (317,ypx_axis_bot+ 45), data_dt , black) # -- Line and STAGE text img.setBrush(brush) img_line((317,ypx_axis_bot+72),(340,ypx_axis_bot+72), gd.gdBrushed) if parm == 'p00065': img_string(gd_font, (350,ypx_axis_bot+65), "STAGE, in feet above datum", black) else: img_string(gd_font, (350,ypx_axis_bot+65), "RAINFALL, in inches", black) # -- Finish and output format_tag = '' outgif = '' if format == 'presentation': format_tag='P' outgif = stn+'_'+short_parm+'_'+modayhrmin[2:]+format_tag+'.gif' img.interlace(1) #gdwrite=time.time() 
img.writeGif(pubdir+'/'+stn+'_'+short_parm+'_'+modayhrmin[2:]+format_tag+'.gif') #output(' GD write time = '+str(round(time.time()-gdwrite,4))+'\n') #output(' GD TOTAL time = '+str(round(time.time()-gdstart,4))+'\n') return outgif # -------------------------------------------------------------------- # -- Function rm_gifs() # -------------------------------------------------------------------- def rm_gifs(sdatT, output): # -- Initialize variable and lists os_unlink = os.unlink os_stat = os.stat gifnamesD = {} gifnamesD_has_key=gifnamesD.has_key rm_gifs = 0 # -- Load giflist[] with names of all gifs # dirL=os.listdir(pubdir) # dirL.sort() # dirL.reverse() rpo=os.popen('ls -t '+pubdir) dirL=splitfields(rpo.read(),'\n') rpo.close() currtime=time.time() for giffile in dirL: try: # -- If presentation gif, delete if more than 60 seconds old if giffile[-5] == 'P': ctime=os.stat(pubdir+'/'+giffile) if currtime - ctime[-1] > 60: os_unlink(pubdir+'/'+giffile) else: # -- Strip off "_ddhhmm.gif" suffix from gifpath gifbase=giffile[:-11] # -- If base of current gif is not already key in gifnamesD{}; make it so # -- else unlink if gifnamesD_has_key(gifbase): os_unlink(pubdir+'/'+giffile) rm_gifs = rm_gifs + 1 else: gifnamesD[gifbase]=giffile except: pass if rm_gifs: output('==\n== Replaced '+str(rm_gifs)+' gifs in '+pubdir+'\n') # -- Write out gifnamesD to module wfo=open(RT_HOME+'/lib/Trt_gifnamesD.py','w') wfo.write('gifnamesD='+str(gifnamesD)) wfo.close() os.rename(RT_HOME+'/lib/Trt_gifnamesD.py',RT_HOME+'/lib/rt_gifnamesD.py') # ---------------------------------- # -- Write quick view index page # ---------------------------------- # -- Get server name and directories server = RT_URL_BASE[:find(RT_URL_BASE,'/',8)] cgi_url = server+'/rt-cgi/' pub_url = RT_URL_BASE+'/pub/' outL = [] outL_append = outL.append # -- Write header for index page outL_append('rt_www Quick view index\n') outL_append('

Quick View Index to Stations

\n') # -- Init and load quick_viewD dictionary quick_viewD={'RT_URL_BASE':RT_URL_BASE , 'cgi_url':cgi_url} count=0 for i in range(len(sdatT)): if sdatT[i][0][0] in digits: stn = sdatT[i][0] name = sdatT[i][2] tmpL = [stn,name] try: tmpL.append( gifnamesD[stn+'_s'] ) except: pass try: tmpL.append( gifnamesD[stn+'_q'] ) except: pass quick_viewD[count] = tmpL outL_append(' '+stn+' '+name+'
\n')
            count = count + 1

    # NOTE(review): the fragment above belongs to a function whose `def` is
    # outside this chunk; its nesting depth was reconstructed -- confirm
    # against the original file.

    # -- Write quick_view_index to disk
    wfo=open(RT_HOME+'/lib/quick_view_index','w')
    wfo.writelines(outL)
    wfo.close()

    # -- Write quick_viewD.py to disk (write to a temp name, then rename,
    # -- so readers never see a half-written module)
    wfo=open(RT_HOME+'/lib/Tquick_viewD.py','w')
    wfo.write('from string import atoi, splitfields; from os import environ; quick_viewD='+str(quick_viewD))
    wfo.close()
    os.rename(RT_HOME+'/lib/Tquick_viewD.py',RT_HOME+'/lib/quick_viewD.py')

# ======================================================================
# -- End misc function definitions
# --
# -- Begin main
# ======================================================================
def graph_update_main(cmd_line_stns,output,format,parm,force_graphs_on):
    """Generate/refresh station graphs (discharge, stage, rainfall gifs).

    Parameters:
      cmd_line_stns   -- list of station numbers given on the command line;
                         ['all'] forces every station in rt_station.dat;
                         an empty list means read the pending-update list
                         from RT_HOME/rt_newgraph.list instead.
      output          -- callable taking one string; receives all log text.
      format          -- 'presentation' selects the large standalone graph
                         geometry; any other value selects the standard
                         web-graph geometry.
      parm            -- parameter letter ('Q'/'S'/'R'); only consulted when
                         format == 'presentation'.
      force_graphs_on -- 'yes' forces graph generation even for parameters
                         that are currently turned off at the station.

    Returns the filename of the last gif written by gen_gif(), or None if
    no gif was produced.  Calls sys.exit() when no graph types are
    configured, when there are no stations to process, or when the daily
    stats file is missing.
    """
    #main_start=time.time()
    parm = upper(parm)

    # -- Set global graph-size variables (read by the gen_* helpers)
    global xpx_max, ypx_gif, ypx_diff, ypx_axis_top, ypx_axis_bot
    if format == 'presentation':
        xpx_max = 580
        ypx_gif = 480
        ypx_diff = 250
        ypx_axis_top = 80
        ypx_axis_bot = 330
    else:
        xpx_max = 560
        ypx_gif = 320
        ypx_diff = 250
        ypx_axis_top = 10
        ypx_axis_bot = 260

    # -- rt defaults
    poss_graph_typesD = {'Q':'Discharge', 'S':'Stage', 'R':'Rainfall'}
    num_q_graphs = num_s_graphs = num_r_graphs = 0
    outgif=''

    # ----------------------------------------------------------------------
    # -- Get graph period dates
    # ----------------------------------------------------------------------
    # -- Set startdate and enddate as DateTime objects.
    # -- int(DateTime.now()) truncates to midnight today; the graph window
    # -- is the previous 6 days through tomorrow.
    today=int(DateTime.now())
    today=DateTime.DateTime(today)
    startdate =today-6
    enddate   =today+1

    # -- Output banner
    dow,month_name,day,hrminsec,year=tuple(split(time.ctime(time.time())))
    banner_time= dow+' '+month_name+' '+day+', '+year+' '+hrminsec[:5]
    output('========================================================================\n')
    output('== rt_graph_update.py -- rev '+str(rt_www_rev)+' '+str(banner_time)+'\n')
    output('== --------------------------------------------------------------------\n')

    # ------------------------------------------------------------------------
    # -- Build graph_types list from the config-file GRAPH_LIST string
    # ------------------------------------------------------------------------
    graph_types=[]
    if 'Q' in GRAPH_LIST: graph_types.append('Q')
    if 'S' in GRAPH_LIST: graph_types.append('S')
    if 'R' in GRAPH_LIST: graph_types.append('R')
    if len(graph_types) == 0:
        output('ERROR!! NO GRAPH TYPES SPECIFIED IN CONFIG FILE; ABORTING \n')
        sys.exit(9)

    # -- Remove gifs for any graph type no longer configured
    # NOTE(review): the original comment said "If GRAPH_LIST is empty" but
    # this loop actually runs per type NOT in graph_types -- confirm intent.
    for i in poss_graph_typesD.keys():
        if i not in graph_types:
            try:
                # best-effort cleanup; errors deliberately ignored
                os.system('rm '+pubdir+'/*_'+i+'* 2>/dev/null ')
                output('== Removing orphaned '+poss_graph_typesD[i]+' graphs\n')
            except:
                pass

    # -- If standalone presentation graph, reset graph_types[]
    if format == 'presentation':
        graph_types=[parm]

    # ------------------------------------------------------------------------
    # -- Create RDB instance from rt_station.dat
    # ------------------------------------------------------------------------
    sdat_recD={}
    rfo=open(RT_HOME+'/rt_station.dat','r')
    sdatT=RDB.Table(rfo)
    # -- Load sdat_recD dict; STATION_NUMBER:sdat_record_number
    # -- (a leading digit distinguishes real station rows from header junk)
    for recno in range(len(sdatT)):
        stn=sdatT[recno].STATION_NUMBER
        if stn[0] in digits:
            sdat_recD[stn]=recno

    # ------------------------------------------------------------------------
    # -- Determine station numbers that need updating: use
    # -- command-line arguments if present
    # ------------------------------------------------------------------------
    new_data_stations=[]
    pt_del_rec=[]          # NOTE(review): unused in this function as visible
    pt_recD={}             # NOTE(review): unused in this function as visible
    use_newgraph_list=0

    # -- Init dictionary to map most recent data date time tag to station number
    last_data_dtD={}

    # -- If command-line arg(s)
    if len(cmd_line_stns):
        # -- Get most recent data date-time for each station from cache;
        # -- modayhrmin becomes MMDDHH+last-two-chars of DATETIME (used in
        # -- gif file names)
        cacheT=RDB.Table(RT_HOME+'/pub/rt_cache')
        for i in range(len(cacheT)):
            dt = cacheT[i].DATETIME
            modayhrmin = dt[1:3]+dt[4:6]+dt[7:9]+dt[-2:]
            last_data_dtD[cacheT[i].STATION_NUMBER]=modayhrmin

        # -- If first command-line arg = ALL; do all graphs
        if lower(cmd_line_stns[0]) == 'all':
            force_graphs_on='yes'
            for recno in range(len(sdatT)):
                stn = sdatT[recno].STATION_NUMBER
                if stn[0] in digits:
                    new_data_stations.append(stn)

        # -- If command-line args not = all; treat as station numbers
        else:
            output('== '+str(len(cmd_line_stns))+' Station number(s) supplied on command line\n')
            output('== -- Forcing update of specified stations even if currently turned off\n')
            force_graphs_on='yes'
            # -- Make sure station number(s) supplied on command line are in rt_station.dat
            for i in range(len(cmd_line_stns)):
                if cmd_line_stns[i] in sdat_recD.keys():
                    new_data_stations.append(cmd_line_stns[i])
                    output('== '+str(cmd_line_stns[i])+' found in rt_station.dat file; OK to proceed\n')
                else:
                    output('== '+str(cmd_line_stns[i])+' not found in rt_station.dat file; IGNORING\n')

    # -- If no command-line args, get new_data_stations from rt_newgraph.list
    else:
        # -- Get stations that need updating from rt_newgraph; first line of
        # -- the file names the uv rdb outfile, following lines are
        # -- tab-separated "station<TAB>datetime" records.  The list file is
        # -- consumed (unlinked) once read.
        try:
            rfo=open(RT_HOME+'/rt_newgraph.list')
            use_newgraph_list = 1
            newgraphL = splitfields(rfo.read(),'\n')
            uv_rdb_outfile = newgraphL[0]
            for line in newgraphL[1:-1]:
                stn, dt = tuple(splitfields(line,'\t'))
                new_data_stations.append(stn)
                modayhrmin = dt[5:7]+dt[8:10]+dt[11:13]+dt[14:16]
                last_data_dtD[stn]=modayhrmin
            rfo.close()
            os.unlink(RT_HOME+'/rt_newgraph.list')
        except:
            # NOTE(review): bare except treats ANY failure (bad line format,
            # unlink error) as "file not found" -- confirm acceptable.
            output('== '+RT_HOME+'/rt_newgraph.list not found\n')
            output('== 0 stations to process.....exiting!\n')
            output('============================ FINISHED ==================================\n\n\n')
            sys.exit()

    # -- Exit if no stations to process
    if not new_data_stations:
        output('== 0 stations to process -- EXITING!\n')
        output('============================ FINISHED ==================================\n\n\n')
        sys.exit(9)

    # -- Output to screen list of stations to process
    if use_newgraph_list:
        output('== '+rjust(str(len(new_data_stations)),3)+' stations in '+rjust(uv_rdb_outfile,50)+'\n')
    else:
        output('== --------- '+str(len(new_data_stations))+' stations to process --------\n')

    # -- Check that new_data_stations are in rt_station.dat; else delete from list
    new_data_stations_del_recnoL=[]
    for i in range(len(new_data_stations)):
        stn = new_data_stations[i]
        if sdat_recD.has_key(stn):
            output('== '+stn+' '+sdatT[sdat_recD[stn]].SITE_NAME_LONG+'\n')
        else:
            output('== '+stn+' *** NOT FOUND IN rt_station.dat FILE; SKIPPING \n')
            new_data_stations_del_recnoL.append(i)

    # -- Delete stations not in rt_station.dat from new_data_stations list
    # -- (delete in reverse so earlier indices stay valid)
    if new_data_stations_del_recnoL:
        new_data_stations_del_recnoL.reverse()
        for recno in new_data_stations_del_recnoL:
            del new_data_stations[recno]
    output('==\n')

    # ------------------------------------------------------------------------
    # -- Create RDB instance from dailystats file and load ds_recD dict
    # ------------------------------------------------------------------------
    # -- Set pathname for dailystats file (dailystats.YYYY.MM.DD for today)
    now=str(DateTime.now())
    ds_file=RT_HOME+'/dailystats.'+now[:4]+'.'+now[5:7]+'.'+now[8:10]

    # -- Make sure dailystats file exists
    try:
        os.stat(ds_file)
    except posix.error:
        output('== <<< ERROR >>> \n '+ds_file+' does not exist \n ABORTING!!!!\n')
        sys.exit(9)

    # -- Open ds_file and create RDB instance ds_T
    rfo=open(ds_file)
    dsT=RDB.Table(rfo)

    # -- Map station number to ds record number (recno)
    ds_recD={}
    for recno in range(len(dsT)):
        stn         =dsT[recno][0]
        ds_recD[stn]=recno

    # ------------------------------------------------------------------------
    # -- Report to screen
    # ------------------------------------------------------------------------
    output('== Period: '+str(startdate)+'-'+str(enddate)+'; Statistic = '+STATISTIC+ \
           '; Types: '+GRAPH_LIST+'\n')
    output('== ====================================================================\n')
    #output('== Main init time = '+str(round(time.time() - main_start,3))+'\n')

    # ==================================================================================
    # ================          MAIN LOOP THROUGH STATIONS          ====================
    # ==================================================================================
    count=0
    for stn in new_data_stations:
        count=count+1
        stn_start=time.time()

        # -- Temporary defaults
        stats=[]
        stats_type='none'

        # -- Get values from station_dat file
        SNL            =sdatT[sdat_recD[stn]].SITE_NAME_LONG
        FLOODSTAGE     =sdatT[sdat_recD[stn]].FLOODSTAGE
        Q_AT_FLOODSTAGE=sdatT[sdat_recD[stn]].Q_AT_FLOODSTAGE

        # -- Get last_data_dt string for gif file name
        # NOTE(review): a station supplied on the command line that is missing
        # from rt_cache would raise KeyError here -- confirm rt_cache always
        # covers all stations.
        last_data_dt = last_data_dtD[stn]

        output('==\n== '+str(count)+' '+stn+' '+SNL+'\n')
        output('== --------------------------------------------\n')

        # ----------------------------------------------------------------------
        # -- Open .dat data file and read into memory
        # ----------------------------------------------------------------------
        try:
            rfo=open(datadir+'/'+stn+'.dat')
            uvL=splitfields(rfo.read(),'\n')
            # -- Step over comments and get names: the first non-'#' line is
            # -- the column-name line; rec+2 also skips the type line below it
            for rec in range(50):
                if uvL[rec][0] <> '#':
                    uv_names=splitfields(uvL[rec],'\t')
                    rec=rec+2
                    break
            # -- Delete non-data from list (header above, trailing empty line)
            del uvL[0:rec]
            del uvL[-1]
            uvlen=len(uvL)
        except:
            uvlen=0
            output('== Datafile not found -- will treat same as \n')

        # -- If datafile empty, catch it here
        if not uvlen:
            output('== Datafile empty; removing existing gifs; skipping to next site\n')
            try:
                os_system('rm '+pubdir+'/'+stn+'*.gif 2> /dev/null')
            except:
                pass
        else:
            # -- Split into nested list (one list of fields per record)
            uvL=map(splitfields,uvL,'\t'*uvlen)

            # --------------------------------------------------------------
            # -- Load datetime list dt_xpx, determine record breaks,
            # -- remove lines with invalid date strings from uvL
            # --------------------------------------------------------------
            dt_xpx, uv_ranges, uvlen, uvL = make_dt_xpx(uvL, uvlen, startdate, output)

            # -- If make_dt_xpx() removes all data, catch it here
            if not uvlen:
                output('== All data preceeds startdate; removing existing gifs; skipping to next site\n')
                try:
                    os_system('rm '+pubdir+'/'+stn+'*.gif 2> /dev/null')
                except:
                    pass

            # -- If we still have data, keep going
            if uvlen:
                # ------------------------------------------------------------
                # -- Load parameter lists
                # ------------------------------------------------------------
                # -- Rainfall
                # -- Determine whether to proceed
                proceed=0
                if 'R' in graph_types:
                    if 'p00045' in uv_names:
                        ic_p00045=uv_names.index('p00045')
                        # -- If last val is non-null and character, parm is turned off
                        if uvL[-1][ic_p00045]:
                            try:
                                xx=atof(uvL[-1][ic_p00045])
                                proceed=1
                            except ValueError:
                                output('== Rainfall currently turned off \n')
                                proceed=0
                                if force_graphs_on == 'yes':
                                    output('== --forcing graph generation anyway\n')
                                    proceed=1
                        else:
                            # -- Last val null; therefore not turned off; therefore proceed
                            proceed=1
                    else:
                        output('== Parameter p00045 not found in '+datadir+'/'+stn+'.dat\n== ...skipping\n')
                        proceed=0

                if proceed:
                    # ------------------------------------------------------------
                    # -- Convert parameter vals to float and get comments for
                    # -- dates that are turned off
                    # ------------------------------------------------------------
                    uvL, p00045L, DT_cmtD, DT_errD = makefloat(uvL, uvlen, ic_p00045, output)
                    if not len(p00045L):
                        output('== No valid data for last 7 days, skipping\n')
                        try:
                            os_system('rm '+pubdir+'/'+stn+'_r*.gif 2> /dev/null')
                        except:
                            pass
                    else:
                        # -- Normalize data to start at zero
                        # -- Find start value: first entry that supports
                        # -- arithmetic (non-numeric entries raise TypeError)
                        for j in range(len(p00045L)):
                            try:
                                junk = p00045L[j] - 1
                                p00045_start = p00045L[j]
                                break
                            except TypeError:
                                pass
                        # -- Subtract start value from p00045L
                        for j in range(len(p00045L)):
                            try:
                                p00045L[j] = p00045L[j] - p00045_start
                                # -- If result less than zero, set equal to zero
                                # -- so p_min will not be negative
                                if p00045L[j] < 0:
                                    p00045L[j] = 0
                            except TypeError:
                                pass
                        # -- Subtract start value from uvL at p00045 position
                        for j in range(len(uvL)):
                            try:
                                uvL[j][ic_p00045] = uvL[j][ic_p00045] - p00045_start
                                # -- If result less than zero, set to 'N' to
                                # -- force exception in gen_segs loop
                                if uvL[j][ic_p00045] < 0:
                                    uvL[j][ic_p00045] = 'N'
                            except TypeError:
                                pass

                        num_r_graphs = num_r_graphs+1

                        # ------------------------------------------------------------
                        # -- Determine y_min and y_max from p00045_min and p00045_max
                        # ------------------------------------------------------------
                        p00045_min = min(p00045L)
                        p00045_max = max(p00045L)
                        y_max, y_min, y_step=y_maxmin_arith('p00045', p00045_max, p00045_min, output)

                        # ------------------------------------------------------------
                        # -- Build p00045 segments list for plotting
                        # ------------------------------------------------------------
                        p00045_segs=gensegs_arith(uvL, dt_xpx, y_max, y_min, uv_ranges, ic_p00045, output)

                        # ------------------------------------------------------------
                        # -- Call gen_gif (rainfall has no floodstage or stats)
                        # ------------------------------------------------------------
                        y_diff = y_max - y_min
                        ypx_flood = 0
                        stats = []
                        stats_type = ''
                        min_period = 0
                        outgif=gen_gif(stn, SNL, 'p00045', p00045_segs, 'arith', y_max, y_min, \
                                       y_step, ypx_flood, FLOODSTAGE_TEXT, stats, stats_type, \
                                       min_period, last_data_dt, output, format, DT_cmtD, DT_errD)

                # ----------------------------------------------------------------------
                # -- Stage
                # -- Determine whether to proceed
                proceed=0
                if 'S' in graph_types:
                    if 'p00065' in uv_names:
                        ic_p00065=uv_names.index('p00065')
                        # -- If last val is non-null and character, parm is turned off
                        if uvL[-1][ic_p00065]:
                            try:
                                xx=atof(uvL[-1][ic_p00065])
                                proceed=1
                            except ValueError:
                                output('== Stage currently turned off \n')
                                proceed=0
                                if force_graphs_on == 'yes':
                                    output('== --forcing graph generation anyway\n')
                                    proceed=1
                        else:
                            # -- Last val null; therefore not turned off; therefore proceed
                            proceed=1
                    else:
                        output('== Parameter p00065 not found in '+datadir+'/'+stn+'.dat\n== ...skipping\n')
                        proceed=0

                if proceed:
                    # ------------------------------------------------------------
                    # -- Convert parameter vals to float and get comments for
                    # -- dates that are turned off
                    # ------------------------------------------------------------
                    uvL, p00065L, DT_cmtD, DT_errD = makefloat(uvL, uvlen, ic_p00065, output)
                    if not len(p00065L):
                        output('== No valid data for last 7 days, skipping\n')
                        try:
                            os_system('rm '+pubdir+'/'+stn+'_s*.gif 2> /dev/null')
                        except:
                            pass
                    else:
                        num_s_graphs = num_s_graphs+1

                        # ------------------------------------------------------------
                        # -- Determine y_min and y_max from p00065_min and p00065_max
                        # ------------------------------------------------------------
                        p00065_min = min(p00065L)
                        p00065_max = max(p00065L)
                        # -- If floodstage below minimum value, make sure it shows
                        if 0 < FLOODSTAGE < p00065_min:
                            p00065_min = FLOODSTAGE
                            output('== Warning: FLOODSTAGE less than all data values\n')
                            output('== ..setting p00065_min = FLOODSTAGE = '+str(FLOODSTAGE)+'\n')
                        y_max, y_min, y_step=y_maxmin_arith('p00065', p00065_max, p00065_min, output)

                        # ------------------------------------------------------------
                        # -- Build p00065 segments list for plotting
                        # ------------------------------------------------------------
                        p00065_segs=gensegs_arith(uvL, dt_xpx, y_max, y_min, uv_ranges, ic_p00065, output)

                        # ------------------------------------------------------------
                        # -- Get FLOODSTAGE ypx (pixel row of the floodstage line)
                        # ------------------------------------------------------------
                        y_diff = y_max - y_min
                        ypx_flood=0
                        if FLOODSTAGE > 0:
                            ypx_flood=ypx_axis_bot-int((FLOODSTAGE-y_min)/y_diff*ypx_diff)

                        stats=[]
                        stats_type=''
                        min_period=0
                        outgif=gen_gif(stn, SNL, 'p00065', p00065_segs, 'arith', y_max, y_min, \
                                       y_step, ypx_flood, FLOODSTAGE_TEXT, stats, stats_type, \
                                       min_period, last_data_dt, output, format, DT_cmtD, DT_errD)

                # ----------------------------------------------------------------------
                # -- Do p00060
                # ----------------------------------------------------------------------
                # -- Discharge
                # -- Determine whether to proceed
                proceed=0
                if 'Q' in graph_types:
                    if 'p00060' in uv_names:
                        ic_p00060=uv_names.index('p00060')
                        # -- If last val is non-null and character, parm is turned off
                        if uvL[-1][ic_p00060]:
                            try:
                                xx=atof(uvL[-1][ic_p00060])
                                proceed=1
                            except ValueError:
                                output('== Discharge currently turned off \n')
                                proceed=0
                                if force_graphs_on == 'yes':
                                    output('== --forcing graph generation anyway\n')
                                    proceed=1
                        else:
                            # -- Last val null; therefore not turned off; therefore proceed
                            proceed=1
                    else:
                        output('== Parameter p00060 not found in '+datadir+'/'+stn+'.dat\n== ...skipping\n')
                        proceed=0

                if proceed:
                    # ------------------------------------------------------------
                    # -- Convert parameter vals to float and get comments for
                    # -- dates that are turned off
                    # ------------------------------------------------------------
                    graph_type= 'log'
                    y_step = 0
                    ic_p00060 = uv_names.index('p00060')   # NOTE(review): redundant; set above
                    uvL, p00060L, DT_cmtD, DT_errD = makefloat(uvL, uvlen, ic_p00060, output)
                    if not len(p00060L):
                        output('== No valid data for last 7 days, skipping\n')
                        try:
                            os_system('rm '+pubdir+'/'+stn+'_q*.gif 2> /dev/null')
                        except:
                            pass
                    else:
                        num_q_graphs=num_q_graphs+1

                        # ------------------------------------------------------------
                        # -- Build stats_vals list and append p00060L
                        # -- (long-term flow stats occupy dailystats cols 1-8;
                        # --  col 9 is the period-of-record length in years)
                        # ------------------------------------------------------------
                        stats_vals=[]
                        if ds_recD.has_key(stn):
                            ds_recno=ds_recD[stn]
                            for j in range(1,9):
                                flow=dsT[ds_recno][j]
                                stats_vals.append(flow)
                                # -- stats also participate in y-axis scaling
                                if str(flow):
                                    p00060L.append(flow)
                            min_period=int(dsT[ds_recno][9])
                        else:
                            # -- Station number was not found in daily stats file
                            output('==\n==\n== ******** WARNING ********\n')
                            output('== STATION '+str(stn)+' NOT IN '+ ds_file+'\n')
                            output('== GRAPH WILL NOT SHOW LONG-TERM FLOW VALUES \n\n')
                            # NOTE(review): min_period is not set on this branch;
                            # the gen_gif call below then reuses the previous
                            # station's value (or raises NameError on the first
                            # station) -- confirm.

                        # ------------------------------------------------------------
                        # -- Determine y_min and y_max from p00060_min and p00060_max
                        # ------------------------------------------------------------
                        p00060_min=min(p00060L)
                        p00060_max=max(p00060L)
                        p00060_diff=p00060_max - p00060_min   # NOTE(review): unused
                        # -- If floodstage below minimum value, make sure it shows
                        if 0 < Q_AT_FLOODSTAGE < p00060_min:
                            p00060_min = Q_AT_FLOODSTAGE
                            output('== Warning: Q_AT_FLOODSTAGE less than all data values\n')
                            output('== ..setting p00060_min = Q_AT_FLOODSTAGE = '+str(Q_AT_FLOODSTAGE)+'\n')
                        # -- Log axis only works for strictly positive data;
                        # -- fall back to arithmetic below 0.01
                        if p00060_min >= .01:
                            y_max, y_min = y_maxmin_log('p00060',p00060_max, p00060_min, output)
                        else:
                            y_max, y_min, y_step = y_maxmin_arith('p00060', p00060_max, p00060_min, output)
                            output('== Warning: y_min less than 0.01, plotting y_axis as arithmetic \n')
                            graph_type='arith'

                        # ------------------------------------------------------------
                        # -- Build p00060 segments list for plotting
                        # ------------------------------------------------------------
                        if p00060_min >= 0.01:
                            p00060_segs=gensegs_log(uvL, dt_xpx, y_max, y_min, uv_ranges, ic_p00060, output)
                        else:
                            p00060_segs=gensegs_arith(uvL, dt_xpx, y_max, y_min, uv_ranges, ic_p00060, output)
                            graph_type='arith'

                        # ------------------------------------------------------------
                        # -- Build stats (ypx) list for plotting
                        # ------------------------------------------------------------
                        stats=['']*8
                        if graph_type == 'log':
                            y_min_log= log10(y_min)
                            y_diff_log=log10(y_max)-log10(y_min)
                            for i in range(len(stats_vals)):
                                val=stats_vals[i]
                                if str(val):
                                    stats[i]=ypx_axis_bot-int((log10(val)-y_min_log)/y_diff_log*ypx_diff)
                        else:
                            # -- type arith
                            y_diff=y_max-y_min
                            for i in range(len(stats_vals)):
                                val=stats_vals[i]
                                if str(val):
                                    stats[i]=ypx_axis_bot-int((val-y_min)/y_diff*ypx_diff)
                        stats.append(min_period)

                        # ------------------------------------------------------------
                        # -- Get Q_AT_FLOODSTAGE ypx
                        # ------------------------------------------------------------
                        if Q_AT_FLOODSTAGE > 0:
                            if graph_type == 'log':
                                ypx_flood=ypx_axis_bot-int((log10(Q_AT_FLOODSTAGE)-y_min_log)/y_diff_log*ypx_diff)
                            else:
                                # -- type arith
                                ypx_flood=ypx_axis_bot-int((Q_AT_FLOODSTAGE-y_min)/y_diff*ypx_diff)
                        else:
                            ypx_flood=0

                        outgif=gen_gif(stn, SNL, 'p00060', p00060_segs, graph_type, y_max, y_min, y_step, \
                                       ypx_flood, Q_AT_FLOODSTAGE_TEXT, stats, STATISTIC, min_period, \
                                       last_data_dt, output, format, DT_cmtD, DT_errD)

        #output('== Station time = '+str(round(time.time() - stn_start,3))+'\n')

    # -- If not presentation graph, remove old gifs
    if format <> 'presentation':
        rmgif_start=time.time()
        rm_gifs(sdatT, output)
        output('== rm_gif time = '+str(round(time.time() - rmgif_start,3))+'\n')

    # ------------------------------------------------------------------------
    # -- Finish up
    # ------------------------------------------------------------------------
    # -- Get end time mark (startsecs is the module-level script start time)
    runsecs=round((time.time()-startsecs)+1,1)

    # -- Get per station seconds
    # NOTE(review): per_stat_secs is computed but never reported below.
    try:
        per_stat_secs=runsecs/len(new_data_stations)
    except ZeroDivisionError:
        per_stat_secs=0
    per_stat_secs=str(round(per_stat_secs,1))

    # -- Get per graph seconds
    # NOTE(review): denominator omits num_r_graphs although rainfall graphs
    # are counted in the summary line -- per-graph time excludes rainfall.
    try:
        per_graph_secs=runsecs/(num_q_graphs + num_s_graphs)
    except ZeroDivisionError:
        per_graph_secs=0
    per_graph_secs=str(round(per_graph_secs,1))

    # -- Format output strings
    runsecs      =str(runsecs)
    num_stations =str(len(new_data_stations))
    num_s_graphs =str(num_s_graphs)
    num_q_graphs =str(num_q_graphs)
    summary= '== ' \
             +str(num_stations)+' stations, (' \
             +str(num_s_graphs)+' stage, ' \
             +str(num_q_graphs)+' discharge, '\
             +str(num_r_graphs)+' rainfall)\n== Total time '+runsecs+' sec ('+per_graph_secs+'/graph)\n'
    output(summary)
    output('============================ FINISHED ==================================\n\n\n')

    # -- Return the last gif filename written, or None
    if outgif:
        return outgif
    else:
        return

#db  # -- Resize and write title
#db  name=str(stn)+' '+name
#db  xpx_size,ypx_size=gd_fontstrsize(gd_font,name)
#db  xpx_title=ypx_axis_bot-((xpx_size*1.3)/2)-10
#db
#db  imgtmp=gd.image((400,13))
#db  tmp_true_black = imgtmp.colorAllocate((0,0,0))
#db  imgtmp.colorTransparent(tmp_true_black)
#db
#db  tmpblack = imgtmp.colorAllocate((0,0,1))
#db
#db  # tmpwhite = imgtmp.colorAllocate((255,255,255))
#db  # imgtmp.filledRectangle((0,0),(400,13),tmpwhite)
#db
#db  imgtmp.string(gd_font, (1,1), name, tmpblack)
#db  imgtmp.copyResizedTo(img,(xpx_title,ypx_axis_top-21),(0,0),(600,20),)