Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def test_rearrenge_episodes_new_schedule(self):
    """Rearranging after adding a weekly 16:00 schedule should interleave
    its issue dates with the programme's existing 14:00 episodes.

    Fixes: the original ``assertListEqual(`` call was never closed (syntax
    error), and ``map(...)`` is not a list on Python 3 — wrap it in
    ``list()`` so the comparison works on both Python 2 and 3.
    """
    Schedule.objects.create(
        programme=self.programme,
        schedule_board=ScheduleBoard.objects.create(),
        type="L",
        recurrences=recurrence.Recurrence(
            dtstart=datetime.datetime(2015, 1, 3, 16, 0, 0),
            dtend=datetime.datetime(2015, 1, 31, 16, 0, 0),
            rrules=[recurrence.Rule(recurrence.WEEKLY)]))
    utils.rearrange_episodes(self.programme, pytz.utc.localize(datetime.datetime(2015, 1, 1)))
    self.assertListEqual(
        list(map(lambda e: e.issue_date,
                 self.programme.episode_set.all().order_by('issue_date')[:5])),
        [
            datetime.datetime(2015, 1, 1, 14, 0),
            datetime.datetime(2015, 1, 2, 14, 0),
            datetime.datetime(2015, 1, 3, 14, 0),
            datetime.datetime(2015, 1, 3, 16, 0),
            datetime.datetime(2015, 1, 4, 14, 0),
        ])
def get_ad_day():
    """Return the current UTC day truncated to midnight, timezone-aware."""
    now = pytz.utc.localize(datetime.datetime.utcnow())
    # Zero out the time-of-day fields; tzinfo (UTC) is preserved.
    return now.replace(hour=0, minute=0, second=0, microsecond=0)
def _default_event_start():
    """Default event start: today's midnight, localized to UTC."""
    midnight = datetime.combine(current_timestamp(), datetime.min.time())
    return pytz.utc.localize(midnight)
def parse_date(indate) -> datetime:
    """Parse *indate* into a timezone-naive datetime.

    Fixes: the original annotation claimed ``indate: str`` while the
    docstring and the ``isinstance`` branch accept datetimes and numbers;
    the misleading annotation is removed.

    Args:
        indate: one of
            - a ``datetime`` (returned unchanged),
            - a numeric epoch timestamp (str/int/float) interpreted as
              UTC and converted to EST wall time (fixed UTC-5, no DST),
            - an ISO-8601 string like ``2015-01-01T00:00:00.000Z``.

    Returns:
        datetime: a naive datetime (tzinfo stripped).

    Raises:
        ValueError: if a string input matches neither format.
    """
    if isinstance(indate, datetime):
        return indate
    try:
        # Numeric path: epoch seconds -> naive UTC -> EST wall clock.
        date = datetime.utcfromtimestamp(float(indate))
        date = pytz.utc.localize(date).astimezone(pytz.timezone('EST')).replace(tzinfo=None)
    except ValueError:
        # float() failed, so fall back to the ISO-8601 "Z" format.
        date = datetime.strptime(indate, "%Y-%m-%dT%H:%M:%S.%fZ")
    return date
# NOTE(review): fragment — the enclosing def/loop header, `repeat_record`
# iteration, `case_accessor`, `timezone`, `writer`, and `result_file_name`
# are defined before this chunk; do not assume their shapes beyond usage below.
state=SUCCESS_STATE, since=self.since):
# The repeat record's payload id is used directly as the episode case id.
episode_case_id = repeat_record.payload_id
episode_case = case_accessor.get_case(episode_case_id)
assert repeat_record.succeeded
# last_checked is stored naive; localize to UTC then convert for display.
time_of_notification = pytz.utc.localize(repeat_record.last_checked).astimezone(timezone)
# assert that
# the last notification was the success one and
# the time for last notification is same as that for the repeat record
last_notification_attempt = repeat_record.attempts[-1]
assert last_notification_attempt.succeeded
assert repeat_record.last_checked == last_notification_attempt.datetime
# Find the form submission that flipped treatment_initiated to "yes_phi".
property_changed_info = get_latest_property_change_to_value(episode_case,
"treatment_initiated",
"yes_phi")
xform = property_changed_info.transaction.form
# received_on is naive UTC; modified_on is already an aware ISO string.
form_received_on = pytz.utc.localize(xform.received_on).astimezone(timezone)
property_modified_on = parse_datetime(property_changed_info.modified_on).astimezone(timezone)
# One CSV row per successfully notified episode, with the latency between
# each stage of the pipeline (form finish -> submission -> notification).
writer.writerow({
'nikshay id': episode_case.get_case_property('nikshay_id'),
'form finished on': property_modified_on.strftime('%Y-%m-%d-%H:%M:%S'),
'form submitted on': form_received_on.strftime('%Y-%m-%d-%H:%M:%S'),
'notification completed on': time_of_notification.strftime('%Y-%m-%d-%H:%M:%S'),
'form to submission': (form_received_on - property_modified_on),
'submission to notification': (time_of_notification - form_received_on),
'case id': episode_case.case_id
})
if email_to:
# Accept either a single address string or any iterable of addresses.
email_to = list(email_to) if not isinstance(email_to, six.string_types) else [email_to]
# NOTE(review): file handle is never closed in the visible code — confirm
# downstream, or wrap in a `with` block.
csvfile = open(result_file_name)
email = EmailMessage(
subject="Nikshay Registration Notification Time Report",
# NOTE(review): fragment — the opening `try:` (and the enclosing function
# header) lies before this chunk; `modm_user`, `user_key_blacklist`, and
# `all_of_the_things` are defined upstream. Python 2 code (`iteritems`).
user = User.objects.get(_guid__guid=modm_user._id)
except User.DoesNotExist:
# No Django user for this modm guid yet: build one from the modm record.
user_fk_nodes = process_node_fk_fields(modm_user)
user_m2m_nodes = process_node_m2m_fields(modm_user)
user_fk_users = process_user_fk_fields(modm_user)
# user_m2m_users = process_user_m2m_fields(modm_user)
user_m2m_tags = process_tag_m2m_fields(modm_user)
user_fields = {}
user_fields['_guid'] = Guid.objects.get(guid=modm_user._id)
user_fields.update(modm_user.to_storage())
user_fields.update(user_fk_nodes)
user_fields.update(user_fk_users)
# Drop None values so they don't clobber model defaults.
user_fields = {k: v for k, v in user_fields.iteritems() if v is not None}
# modm stores naive datetimes; make them UTC-aware before saving.
for k, v in user_fields.iteritems():
if isinstance(v, datetime):
user_fields[k] = pytz.utc.localize(v)
user = User.objects.create(**{key: user_fields[key] for key in user_fields if key not in user_key_blacklist})
# Module-level migration counter.
global users
users += 1
# M2M relations must be set after the instance exists.
set_m2m_fields(user, user_m2m_nodes)
# set_m2m_fields(user, user_m2m_users)
set_m2m_fields(user, user_m2m_tags)
# Cache by modm id so later lookups skip the DB.
all_of_the_things[modm_user._id] = user
return user
def start_time(timezone=None):
    """Returns the time the interview was started, as a DADateTime object."""
    # Default to UTC when no timezone name is given.
    target = pytz.timezone(timezone) if timezone is not None else pytz.utc
    started = pytz.utc.localize(this_thread.internal['starttime'])
    return dd(started.astimezone(target))
def _mangle_prefix(res):
    """Mangle a prefix result for XML-RPC transport.

    Large counters become strings and an "infinite" expiry becomes None.
    """
    # fugly cast from large numbers to string to deal with XML-RPC
    for counter in ('total_addresses', 'used_addresses', 'free_addresses'):
        res[counter] = unicode(res[counter])
    # postgres has notion of infinite while datetime hasn't, if expires
    # is equal to the max datetime we assume it is infinity and instead
    # represent that as None
    expires = res['expires']
    if expires.tzinfo is None:
        expires = pytz.utc.localize(expires)
    res['expires'] = expires
    if expires == pytz.utc.localize(datetime.datetime.max):
        res['expires'] = None
    return res
Arguments:
t: a datetime object. It may be in any pytz time zone, or it may be
timezone-naive (interpreted as UTC).
tzinfo: a pytz timezone object, or None.
Returns:
a datetime object in the time zone 'tzinfo'
"""
# NOTE(review): fragment — the `def` line and docstring opening are above
# this chunk; `pytz` here may be None when the library is unavailable.
if pytz is None:
# No pytz: best effort — just strip any tzinfo from the input.
return t.replace(tzinfo=None)
elif tzinfo:
# Naive input is documented above as UTC; make it explicit first.
if not t.tzinfo:
t = pytz.utc.localize(t)
# normalize() fixes DST offsets after the zone conversion.
return tzinfo.normalize(t.astimezone(tzinfo))
elif t.tzinfo:
# No target zone: collapse aware input to naive UTC.
return pytz.utc.normalize(t.astimezone(pytz.utc)).replace(tzinfo=None)
else:
# Naive input, no target zone: already naive UTC by convention.
return t
# NOTE(review): fragment of a larger cleanup script — `search_output_filtered`,
# `airflow_version`, `session`, `DEBUG`, `parse_process_linux_string`, and the
# rest of the loop body (use of `kill_reason`) are outside this chunk.
logging.info("Searching through running processes...")
# Versions whose execution_date columns are naive; 1.10+ needs tz-aware values.
airflow_timezone_not_required_versions = ['1.7', '1.8', '1.9']
processes_to_kill = []
for line in search_output_filtered:
logging.info("")
process = parse_process_linux_string(line=line)
logging.info("Checking: " + str(process))
# Process args carry an ISO-like date; swap the "T" for the DB-style space.
exec_date_str = (process["airflow_execution_date"]).replace("T", " ")
if '.' not in exec_date_str:
exec_date_str = exec_date_str + '.0' # Add milliseconds if they are missing.
execution_date_to_search_for = datetime.strptime(exec_date_str, '%Y-%m-%d %H:%M:%S.%f')
# apache-airflow version >= 1.10 requires datetime field values with timezone
if airflow_version[:3] not in airflow_timezone_not_required_versions:
execution_date_to_search_for = pytz.utc.localize(execution_date_to_search_for)
logging.info("Execution Date to Search For: " + str(execution_date_to_search_for))
# Checking to make sure the DAG is available and active
if DEBUG:
logging.info("DEBUG: Listing All DagModels: ")
for dag in session.query(DagModel).all():
logging.info("DEBUG: dag: " + str(dag) + ", dag.is_active: " + str(dag.is_active))
logging.info("")
logging.info("Getting dag where DagModel.dag_id == '" + str(process["airflow_dag_id"]) + "'")
dag = session.query(DagModel).filter(
DagModel.dag_id == process["airflow_dag_id"]
).first()
logging.info("dag: " + str(dag))
if dag is None:
# Orphaned process: its DAG no longer exists in the metastore.
kill_reason = "DAG was not found in metastore."