Is there a better way to catch API changes for a service that doesn't version its API?

I'm using the old Google Data API to access several spreadsheets and put their contents in a database using Flask and SQLAlchemy. Each run deletes the contents of the database tables and repopulates them with results of the API call.
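
For reference, a minimal sketch of the kind of Flask-SQLAlchemy setup the function below relies on; the SheetRow model and the generic column1/column2/column3 names are illustrative placeholders rather than the real schema, and db and DEBUG are the module-level names the function uses.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///spreadsheets.db'
db = SQLAlchemy(app)
DEBUG = True


class SheetRow(db.Model):
    # Generic columns matching the keyword arguments used in get_google_content.
    id = db.Column(db.Integer, primary_key=True)
    column1 = db.Column(db.Text)  # worksheet title
    column2 = db.Column(db.Text)  # 'gsx$location' cell
    column3 = db.Column(db.Text)  # 'gsx$caption' cell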

import re
from collections import OrderedDict

import requests

# DEBUG, db (the Flask-SQLAlchemy handle) and the db_table model come from
# the application setup.


def get_google_content(key, db_table):
    """Replace the contents of db_table with the rows of the public
    spreadsheet identified by key; return 0 on success, 1 on failure."""
    if DEBUG:
        print 'Fetching content from Google Drive'
    spreadsheet_url = ('https://spreadsheets.google.com/feeds/worksheets/'
                       '{}/public/basic?alt=json'.format(key))
    spreadsheet_req = requests.get(spreadsheet_url)
    if spreadsheet_req.status_code != 200:
        print 'Failed to retrieve spreadsheet from {}'.format(spreadsheet_url)
        print 'Google Spreadsheet returned {}'.format(
                spreadsheet_req.status_code)
        db.session.rollback()
        return 1

    # Worksheet ids, de-duplicated while preserving their order in the feed.
    sheet_ids = list(OrderedDict.fromkeys(re.findall(r'/public/basic/(\w*)',
                     spreadsheet_req.text)))

    # The only trivial way to catch removed spreadsheet data is to delete the
    # table and fill it all out again
    db.session.query(db_table).delete()

    for sheet_id in sheet_ids:
        worksheet_url = ('https://spreadsheets.google.com/feeds/list/{}/{}/'
                         'public/values?alt=json'.format(key, sheet_id))
        worksheet_req = requests.get(worksheet_url)
        if worksheet_req.status_code != 200:
            print 'Failed to retrieve spreadsheet from {}'.format(worksheet_url)
            print 'Google Spreadsheet returned {}'.format(
                    worksheet_req.status_code)
            db.session.rollback()
            return 1

        try:
            sheet_name = worksheet_req.json()['feed']['title']['$t']
            for row in worksheet_req.json()['feed']['entry']:
                db_row = db_table(column1=sheet_name,
                                  column2=row['gsx$location']['$t'],
                                  column3=row['gsx$caption']['$t'])
                db.session.add(db_row)
        except KeyError:
            print 'KeyError accessing Google Spreadsheet JSON from {}'.format(
                worksheet_url)
            print 'API changed?'
            db.session.rollback()
            return 1

    # Commit once, after every worksheet has been processed.
    db.session.commit()
    return 0
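
The function is invoked from a refresh job roughly like this; SPREADSHEET_KEY and SheetRow are placeholders for the real key and model, shown only to illustrate how the 0/1 return code gets checked.

SPREADSHEET_KEY = 'your-spreadsheet-key'  # placeholder, not a real key


def refresh_content():
    # Rebuild the table and report whether the API still looked as expected.
    status = get_google_content(SPREADSHEET_KEY, SheetRow)
    if status != 0:
        print 'Refresh failed; the delete was rolled back, so the previous rows were kept'
    return status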