Automatically remove existing future events that are about to be re-added, to prevent duplicates

This commit is contained in:
Oliver Bell
2018-10-02 13:52:37 +01:00
parent e4b959af9f
commit d81730d0b4
3 changed files with 103 additions and 32 deletions

View File

@@ -15,4 +15,4 @@ pytz = "*"
pylint = "*"
[requires]
python_version = "3.5"
python_version = "3.6"

4
Pipfile.lock generated
View File

@@ -1,11 +1,11 @@
{
"_meta": {
"hash": {
"sha256": "c71cc3fb1c386033f20ceadbde8da577f6af6c645333056a5e84bff225fe936d"
"sha256": "ffdcc7d81db0f5c5be8a6c0e7f3d57a38eff7d66c904d23adfc53ba06e7af315"
},
"pipfile-spec": 6,
"requires": {
"python_version": "3.5"
"python_version": "3.6"
},
"sources": [
{

125
main.py
View File

@@ -6,19 +6,10 @@ from httplib2 import Http
from oauth2client import file, client, tools
import pytz
# We don't store this url in the source, as it is sensitive
URL = open("timetableurl").read().strip()
def parse_events(page_data):
soup = BeautifulSoup(page_data, features="html.parser")
source = ""
for script in soup.head.findAll("script", {"type": "text/javascript"}):
if not script.has_attr("src"):
source = script.text
break
events_data = source.split("events:")[1].split("]")[0] +"]"
def parse_events(events_data):
# Replace date objects with tuples, easier to parse
events_data = events_data.replace("new Date", "")
@@ -37,6 +28,7 @@ def parse_events(page_data):
cleaned_data += line + "\n"
# Parse the event, as if it were a dict
parsed_data = eval(cleaned_data)
# Parse the datetime info
@@ -57,8 +49,22 @@ def parse_events(page_data):
return parsed_data
def get_timetable_data(url):
    """Download and return the raw timetable page body for *url*."""
    response = requests.get(url)
    return response.text
def get_events_data(url):
    """Fetch the timetable page and extract the raw JS events array text."""
    html = requests.get(url).text
    markup = BeautifulSoup(html, features="html.parser")
    inline_js = ""
    # The events array lives in the first inline (src-less) script tag in <head>.
    for tag in markup.head.findAll("script", {"type": "text/javascript"}):
        if not tag.has_attr("src"):
            inline_js = tag.text
            break
    # Take everything between "events:" and the first closing bracket,
    # re-appending the "]" that split() consumed.
    return inline_js.split("events:")[1].split("]")[0] + "]"
def create_google_event(event):
new_event = event.copy()
@@ -77,28 +83,66 @@ def create_google_event(event):
'overrides': [{'method': 'popup', 'minutes': 30}]}
return new_event
# If modifying these scopes, delete the file token.json.
SCOPES = 'https://www.googleapis.com/auth/calendar'
def main():
"""Shows basic usage of the Google Calendar API.
Prints the start and name of the next 10 events on the user's calendar.
"""
# Read and write access
SCOPES = "https://www.googleapis.com/auth/calendar"
type_to_color = {}
color_queue = list(range(0, 12))
def get_calendar_service():
    """
    Connect to the Google Calendar service and return the
    service object.

    Reads cached OAuth credentials from token.json; if they are
    missing or invalid, runs the interactive OAuth flow using the
    client secrets file and stores the result back in token.json.
    """
    store = file.Storage("token.json")
    creds = store.get()
    # Run prompt to get the google credentials
    if not creds or creds.invalid:
        # Bug fix: the client secrets file is "credentials.json";
        # the previous revision misspelled it as "redentials.json",
        # which would make the OAuth flow fail to find the file.
        flow = client.flow_from_clientsecrets("credentials.json", SCOPES)
        creds = tools.run_flow(flow, store)
    return build("calendar", "v3", http=creds.authorize(Http()))
def execute_batch(service, commands):
    """
    Execute API *commands* against *service* in batches of at most
    1000 requests per batch HTTP request, flushing any remainder.
    """
    batch = service.new_batch_http_request()
    pending = 0
    for request in commands:
        batch.add(request)
        pending += 1
        # Flush once the batch reaches 1000 queued requests.
        if pending >= 1000:
            batch.execute()
            batch = service.new_batch_http_request()
            pending = 0
    # Send whatever is left over in the final partial batch.
    if pending:
        batch.execute()
def main():
type_to_color = {}
# A queue of colors, from which a color is removed
# when an event type we have not seen before appears
color_queue = list(range(0, 12, 3))
service = get_calendar_service()
# Get a list of all events in the future
results = service.events().list(timeMin=datetime.now().isoformat() + 'Z', calendarId='primary').execute()
future_events = results.get("items", [])
cov_events = parse_events(get_events_data(URL))
new_events = []
new_summaries = set()
for event in cov_events:
if not event:
continue
new_event = create_google_event(event)
color_type = new_event["mainColor"]
@@ -112,6 +156,33 @@ def main():
new_event["colorId"] = colorId
service.events().insert(body=new_event, calendarId='primary').execute()
new_events.append(new_event)
new_summaries.add(new_event["summary"])
main()
# Make sure we remove old events so as not to create duplicates
if not future_events:
print('No existing events found')
else:
deletes = []
for existing_event in future_events:
if "summary" in existing_event and existing_event["summary"] in new_summaries:
deletes.append(service.events()
.delete(calendarId='primary',
eventId=existing_event['id']))
print(f'Removing {len(deletes)} existing events')
execute_batch(service, deletes)
inserts = []
for new_event in new_events:
inserts.append(service.events()
.insert(body=new_event,
calendarId='primary'))
print(f"Inserting {len(inserts)} new events")
execute_batch(service, inserts)
# Script entry point: sync the timetable into Google Calendar.
if __name__ == "__main__":
    main()