Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — begins mid-view; `to_weeks`, `oldest_date`,
# `show_in_space`, `show_in_stations` and `divisions` are defined above this
# chunk. Indentation appears stripped; code bytes kept as-is.
newest_date = timezone.now() - timedelta(weeks=to_weeks)
query = AssetDiff.objects.all()
# Restrict the queryset to the [oldest_date, newest_date] window.
query = query.filter(date__gte=oldest_date)
query = query.filter(date__lte=newest_date)
# stationID is compared against MAX_STATION_ID to split "in space" vs
# "in station" rows. NOTE(review): both filters are strict (< / >), so a row
# with stationID exactly equal to MAX_STATION_ID is dropped by either branch —
# TODO confirm the intended boundary semantics.
if not show_in_space:
query = query.filter(stationID__lt=constants.MAX_STATION_ID)
if not show_in_stations:
query = query.filter(stationID__gt=constants.MAX_STATION_ID)
if divisions is not None:
query = query.filter(hangarID__in=divisions)
# Distinct dates, newest first, serialized as {'value','show'} pairs for the
# client-side date picker.
dates = []
for date in query.values_list('date', flat=True).distinct().order_by('-date'):
dates.append({
'value' : datetime.strftime(date, DATE_PATTERN),
'show' : print_time_min(date),
})
return HttpResponse(json.dumps(dates))
# NOTE(review): fragment — begins mid-function; `yearly_archives`,
# `monthly_archives`, `out_dir`, `template`, `env`, `blog` and the *_pages
# variables come from the enclosing scope. Indentation appears stripped.
for key in yearly_archives:
# Despite the name, this is the output *file* path (".../<year>/index.html").
archive_out_dir = os.path.join(out_dir, str(key), 'index.html')
# NOTE(review): builtin open()'s third positional argument is `buffering`
# (an int) — passing 'utf-8' here raises TypeError unless this module does
# `from codecs import open`. Verify; otherwise use encoding='utf-8'.
with open(archive_out_dir, 'w', 'utf-8') as f:
f.write(template.render(blog=blog.config,
posts=yearly_archives[key], archive_name=key, yearly=True,
pages=yearly_archive_pages))
# Render the monthly archive pages
template = env.get_template('archive.html')
for key in monthly_archives:
# key is date-like; output path is ".../<year>/<MM>/index.html"
# (month zero-padded to two digits).
archive_out_dir = os.path.join(out_dir, str(key.year),
str(key.month).rjust(2, '0'), 'index.html')
# NOTE(review): same open(..., 'w', 'utf-8') concern as above.
with open(archive_out_dir, 'w', 'utf-8') as f:
f.write(template.render(blog=blog.config,
posts=monthly_archives[key],
archive_name=datetime.strftime(key, "%B %Y"),
pages=monthly_archive_pages))
# Edit ref (a, img) src for the front page and the ATOM feed
for post in blog.posts:
# NOTE(review): BeautifulSoup without an explicit parser argument emits a
# warning on bs4 and picks whichever parser is installed — results may vary.
soup = BeautifulSoup(post.html)
refs = soup.findAll('a') + soup.findAll('img')
for ref in refs:
# EAFP: <img> tags carry 'src', <a> tags carry 'href'; tags with neither
# attribute are skipped.
try:
link = ref['src']
img = True
except KeyError:
try:
link = ref['href']
img = False
except KeyError:
continue
# NOTE(review): fragment — begins mid-function; `context`, `strat`, `mode`
# and `choices` come from the enclosing scope. Indentation appears stripped.
exchange = context.get("exchange").title()
# "trade_currency" is treated as the base currency of the pair.
base_currency = context.get("trade_currency").upper()
quote_currency = context.get("quote_currency").upper()
capital_base = context.get("capital_base")
# The pair symbol itself is lower-case (upper-casing above feeds the
# f-string, then the whole pair is lowered).
trade_pair = f"{base_currency}_{quote_currency}".lower()
hours = int(context.get("hours"))
start = datetime.datetime.utcnow()
end = start + datetime.timedelta(hours=hours)
# ML models are listed under "models"; every other strategy under "indicators".
if strat in choices.ML_MODELS:
strat_dict = {"trading": {}, "models": [{"name": strat}]}
else:
strat_dict = {"trading": {}, "indicators": [{"name": strat}]}
# NOTE(review): START and END use different datetime formats
# ("%Y-%m-%d-%H-%M" vs "%Y-%m-%d %H:%M:%S") — confirm the consumer really
# expects this asymmetry.
strat_dict["trading"]["START"] = datetime.datetime.strftime(start, "%Y-%m-%d-%H-%M")
strat_dict["trading"]["END"] = datetime.datetime.strftime(end, "%Y-%m-%d %H:%M:%S")
strat_dict["trading"]["EXCHANGE"] = exchange
strat_dict["trading"]["ASSET"] = trade_pair
strat_dict["trading"]["CAPITAL_BASE"] = float(capital_base)
strat_dict["trading"]["QUOTE_CURRENCY"] = quote_currency.lower()
strat_dict["name"] = f"{strat}-{mode.title()}"
return strat_dict
def save_extras_to_redis(self):
    """Snapshot this object's transient sensor readings into redis.

    Stores rssi / raw_gravity / raw_temp (None when the attribute is absent
    on the instance) together with a human-readable save timestamp, as
    UTF-8-encoded JSON under the key 'tilt_<color>_extras'.
    """
    client = redis.Redis(
        host=settings.REDIS_HOSTNAME,
        port=settings.REDIS_PORT,
        password=settings.REDIS_PASSWORD,
    )
    saved_at = timezone.now().strftime("%c")
    payload = {
        'rssi': getattr(self, 'rssi', None),
        'raw_gravity': getattr(self, 'raw_gravity', None),
        'raw_temp': getattr(self, 'raw_temp', None),
        'saved_at': saved_at,
    }
    key = 'tilt_{}_extras'.format(self.color)
    client.set(key, json.dumps(payload).encode(encoding="utf-8"))
def write_counts(bcb, level="gene"):
"""
pull counts and metadata out of the bcbioRNASeq object

NOTE(review): this chunk is truncated — the R `render_string` literal below
is an unterminated parenthesized expression whose tail (and the code that
executes it via `rcmd`) continues past the end of this view.
"""
date = dt.strftime(dt.now(), "%Y-%m-%d")
# Output layout: <bcb dir>/../results/<YYYY-MM-DD>/<level>/counts/counts.csv.gz
out_dir = os.path.join(os.path.dirname(bcb), "..", "results", date, level, "counts")
out_dir_string = _quotestring(out_dir)
out_file = os.path.join(out_dir, "counts.csv.gz")
safe_makedir(out_dir)
# Idempotent: reuse an existing counts file rather than re-rendering.
if file_exists(out_file):
return out_file
bcb_string = _quotestring(bcb)
rcmd = Rscript_cmd()
# Inline R script: load the serialized bcbioRNASeq object, then extract
# rounded counts and the per-sample metadata as data frames.
render_string = (
f'load({bcb_string});'
f'date=format(Sys.time(), "%Y-%m-%d");'
f'dir={out_dir_string};'
f'library(tidyverse);'
f'library(bcbioRNASeq);'
f'counts = bcbioRNASeq::counts(bcb) %>% as.data.frame() %>% round() %>% tibble::rownames_to_column("gene");'
f'metadata = colData(bcb) %>% as.data.frame() %>% tibble::rownames_to_column("sample");'
def _defaults(core_dict):
# -------------------------------------------------------------------------
# DEFAULTS
# -------------------------------------------------------------------------
# Fill missing keys of `core_dict` (and its per-simulation sub-dicts under
# core_dict['sim']) with defaults, mutating in place. Always returns None.
# NOTE(review): indentation appears stripped in this chunk, so the exact
# nesting below (e.g. whether the 'Animate' default sits inside the
# Histogram/Variable branch) cannot be confirmed from this view.
if core_dict['init'] == 'Histogram' or core_dict['init'] == 'Variable':
# data folder to save data files in; uses a timestamp for a label
core_dict.setdefault('data_folder',
"Data/{}".format(core_dict['init'][:4] + " " + datetime.strftime(
datetime.now(), '%Y-%m-%d %H_%M_%S')))
core_dict.setdefault('Animate', False)
for sub_dict in core_dict['sim']:
# Visualization-only knobs.
if core_dict['init'] == 'Visualize':
sub_dict.setdefault('NoAxesTick', False)
sub_dict.setdefault('HelpLines', True)
sub_dict.setdefault('FPS', 20)
# NOTE(review): assumes sub_dict['Bandit']['ArmList'][0][0] always exists —
# verify against callers; a missing key would raise here.
if sub_dict['Bandit']['ArmList'][0][0] == 'Linear':
sub_dict.setdefault('Normalized', False)
return None
# NOTE(review): fragment — begins mid-method of a plotting class; `self`,
# `figsize`, `vector_overlay`, `subsample`, `scale`, `mlab` and `unsurfm`
# come from the enclosing scope. Indentation appears stripped.
u,v,w = self.getVector()
self.loadData()
# Find the colorbar limits if unspecified
# NOTE(review): `self.clim==None` should be `self.clim is None` (PEP 8);
# kept as-is in this doc-only pass.
if self.clim==None:
self.clim=[]
self.clim.append(np.min(self.data))
self.clim.append(np.max(self.data))
# Generate the initial plot
points = np.column_stack((self.xp,self.yp,0.0*self.xp))
# Title: "<long_name> [<units>]\n Time: <first timestep's timestamp>".
titlestr='%s [%s]\n Time: %s'%(self.long_name,self.units,\
datetime.strftime(self.time[self.tstep[0]],'%d-%b-%Y %H:%M:%S'))
mlab.figure(size=figsize)
self.fig,self.h,ug,d,title = unsurfm(points,self.cells,np.array(self.data[0,:]),clim=self.clim,title=titlestr)
if vector_overlay:
# Add vectors to the unstructured grid object
#ug.cell_data.vectors=np.asarray((u[0,:],v[0,:],w[0,:])).T
#ug.cell_data.vectors.name='vectors'
#d.update()
#h2=mlab.pipeline.vectors(d,color=(0,0,0),mask_points=1,scale_factor=1./scale)
# NOTE(review): the z-coordinate passes self.yv*0 — presumably a flat
# plane at z=0; confirm self.xv/self.yv are the vector grid coordinates.
vec=mlab.pipeline.vector_scatter(self.xv, self.yv, self.yv*0, u[0,:], v[0,:], w[0,:])
h2=mlab.pipeline.vectors(vec,color=(0,0,0),mask_points=subsample,scale_factor=1./scale,scale_mode='vector')
# Animate the plot by updating the scalar data in the unstructured grid object
# for ii in range(nt):
def check_if_date_in_last_week(self, date):
    """Report whether a user's last activity fell in the week before today.

    :param date: user last activity datetime (UTC) or None
    :return: tuple of (IST-formatted date string, or 'N/A' when date is
             None; True when the activity date lies between 8 days ago and
             yesterday, inclusive)
    """
    if date is None:
        return 'N/A', False
    local_date = self.convert_utc_to_ist(date)
    formatted = local_date.strftime("%d/%m/%Y, %I:%M %p")
    today = datetime.datetime.now(INDIA_TIMEZONE)
    window_end = (today - relativedelta(days=1)).date()
    window_start = (today - relativedelta(days=8)).date()
    return formatted, window_start <= local_date.date() <= window_end
def updateRepresentation(self):
    """Regenerate the serialized NSI topology document and its version stamps."""
    document = nmlxml.nsiXML(self.nsi_agent, self.nml_network, self.route_vectors)
    self.topology_representation = ET.tostring(document)
    version = self.nml_network.version
    # Version stamp is truncated to whole seconds; the HTTP variant keeps the
    # full value formatted per RFC 850.
    self.topology_version = version.replace(microsecond=0)
    self.topology_version_http = version.strftime(self.RFC850_FORMAT)
def _build_date(self, date):
'''Costruzione di una stringa che rappresenta una data, con il
fuso orario corretto
'''
if date is None: return None
if date.tzinfo is None:
tzsec = time.localtime().tm_gmtoff
tz = "{:+03d}:{:02d}".format(int(tzsec / 3600), abs(tzsec) % 3600)
return datetime.strftime(date, "%Y-%m-%dT%H:%M:%S") + tz
tzw = datetime.strftime(date, "%z")
tz = "{}:{}".format(tzw[:3], tzw[3:])
return datetime.strftime(date, "%Y-%m-%dT%H:%M:%S") + tz