Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

List of functionalities shared by sensors #123

Open
matthew-schultz opened this issue Oct 25, 2019 · 1 comment
Open

List of functionalities shared by sensors #123

matthew-schultz opened this issue Oct 25, 2019 · 1 comment
Assignees

Comments

@matthew-schultz
Copy link
Member

Bullet list of functionalities that a new sensor should have, but that are not strictly required in order to get data:

  • error_log id
  • database
  • verbose flag
  • txt error.log file

Add to wiki:

  • DB Schema for each bullet
    • fade the tables that do not get affected
    • highlight those that do
      Should be useful for future users of lonoa to understand what db columns the scripts do and do not use.
@matthew-schultz
Copy link
Member Author

matthew-schultz commented Dec 25, 2019

@carlosparadis just wanted to annotate the current shared parts of the code in the master branch that we talked about, before I head out for Christmas vacation.

error_logging

API_EGAUGE
get_data_from_api()

for purpose_sensor in purpose_sensors:
error_log_row = orm_egauge.ErrorLog(purpose_id=purpose_sensor.purpose_id, datetime=current_time, was_success=True, pipeline_stage=orm_egauge.ErrorLog.PipelineStageEnum.data_acquisition)
conn.add(error_log_row)
conn.commit()

insert_readings_into_database()
error_log_row = orm_egauge.ErrorLog(purpose_id=purpose_sensor.purpose_id, datetime=current_time, pipeline_stage=orm_egauge.ErrorLog.PipelineStageEnum.database_insertion, was_success=True)
conn.add(error_log_row)
# need to flush and refresh to get error_log_row.log_id
conn.flush()
conn.refresh(error_log_row)
# update current set of readings with related log_id
conn.query(orm_egauge.Reading.log_id).\
filter(orm_egauge.Reading.purpose_id == purpose_sensor.purpose_id,
orm_egauge.Reading.upload_timestamp == current_time).\
update({'log_id':error_log_row.log_id})
logging.info(str(rows_inserted) + ' readings(s) attempted to be inserted by ' + SCRIPT_NAME + ' for purpose id ' + str(purpose_sensor.purpose_id))
conn.commit()

log_failure_to_connect_to_api()
for purpose_id in purpose_ids:
error_log_row = orm_egauge.ErrorLog(datetime=current_time, error_type=exception.__class__.__name__, pipeline_stage=orm_egauge.ErrorLog.PipelineStageEnum.data_acquisition, purpose_id=purpose_id, was_success=False)
conn.add(error_log_row)
conn.commit()

log_failure_to_connect_to_database()
for purpose_sensor in purpose_sensors:
error_log_row = orm_egauge.ErrorLog(datetime=current_time, error_type=exception.__class__.__name__, purpose_id=purpose_sensor.purpose_id, pipeline_stage=orm_egauge.ErrorLog.PipelineStageEnum.database_insertion, was_success=False)
conn.add(error_log_row)
conn.commit()


API_WEBCTRL
get_data_from_api()

conn.add(error_log_row)
conn.commit()
return readings
else:
readings.raise_for_status()

insert_readings_into_database()
error_log_row = orm_webctrl.ErrorLog(datetime=current_time, was_success=True, purpose_id=sensor.purpose_id, pipeline_stage=orm_webctrl.ErrorLog.PipelineStageEnum.database_insertion)
conn.add(error_log_row)
# need to flush and refresh to get error_log_row.log_id
conn.flush()
conn.refresh(error_log_row)
# update current set of readings with related log_id
conn.query(orm_webctrl.Reading.log_id). \
filter(orm_webctrl.Reading.purpose_id == sensor.purpose_id,
orm_webctrl.Reading.upload_timestamp == current_time). \
update({'log_id': error_log_row.log_id})
conn.commit()

log_failure_to_connect_to_api()
error_log_row = orm_webctrl.ErrorLog(datetime=current_time, error_type=exception.__class__.__name__, pipeline_stage=orm_webctrl.ErrorLog.PipelineStageEnum.data_acquisition, purpose_id=sensor.purpose_id, was_success=False)
conn.add(error_log_row)
conn.commit()

log_failure_to_connect_to_database()
error_log_row = orm_webctrl.ErrorLog(datetime=current_time, error_type=exception.__class__.__name__, pipeline_stage=orm_webctrl.ErrorLog.PipelineStageEnum.database_insertion, purpose_id=sensor.purpose_id, was_success=False)
conn.add(error_log_row)
conn.commit()


EXTRACT_HOBO
get_csv_from_folder_not_in_db()

for sensor_info_row in sensor_info_rows:
error_log_row = orm_hobo.ErrorLog(was_success=True, purpose_id=sensor_info_row.purpose_id, datetime=current_time, pipeline_stage=orm_hobo.ErrorLog.PipelineStageEnum.data_acquisition)
conn.add(error_log_row)

insert_csv_readings_into_db()
error_log_row = orm_hobo.ErrorLog(was_success=True, purpose_id=sensor_info_row.purpose_id, datetime=current_time, pipeline_stage=orm_hobo.ErrorLog.PipelineStageEnum.database_insertion)
conn.add(error_log_row)

log_failure_to_get_csv_readings_from_folder_not_in_db()
error_log_row = orm_hobo.ErrorLog(was_success=False, datetime=current_time, error_type=exception.__class__.__name__, pipeline_stage=orm_hobo.ErrorLog.PipelineStageEnum.data_acquisition)
conn.add(error_log_row)
# need to flush and refresh to get error_log_row.log_id
conn.flush()
conn.refresh(error_log_row)
csv_filename_row = orm_hobo.ErrorLogDetails(log_id=error_log_row.log_id, information_type="csv_filename", information_value=csv_filename)
conn.add(csv_filename_row)
conn.commit()

log_failure_to_insert_csv_readings_into_db()
if not new_readings:
error_log_row = orm_hobo.ErrorLog(purpose_id=sensor_info_row.purpose_id, datetime=current_time, error_type=exception.__class__.__name__,pipeline_stage=orm_hobo.ErrorLog.PipelineStageEnum.database_insertion)
# set was_success to False if readings were new but an error was thrown
else:
error_log_row = orm_hobo.ErrorLog(purpose_id=sensor_info_row.purpose_id, was_success=False, datetime=current_time, error_type=exception.__class__.__name__, pipeline_stage=orm_hobo.ErrorLog.PipelineStageEnum.database_insertion)
conn.add(error_log_row)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Development

No branches or pull requests

2 participants