This way you only have to open the file once per page, instead of a hundred times. Even better would be to make the whole thing a generator, so you can write all rows at once while opening the file only once:
import csv

import requests


def get_all_jobs(url, cities, pages):
    for city in cities:
        for page in pages:
            params = {"page": page, "results_per_page": 100, "locations[0]": city}
            response = requests.get(url, params=params)
            response.raise_for_status()  # check status code
            yield from get_job_infos(response)
            # rate throttling, etc here
            ...
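If you want the throttling to stay out of the scraping logic, one option is a small wrapper generator. This is just a sketch, not part of the code above: the throttled helper and its one-second default delay are assumptions to illustrate the idea.

import time


def throttled(iterable, delay=1.0):
    """Yield each item from iterable, then pause, so consumers are rate-limited."""
    for item in iterable:
        yield item
        time.sleep(delay)

Since get_all_jobs is itself a generator, you could then apply it at the call site, e.g. writer.writerows(throttled(get_all_jobs(url, cities, pages))), without touching the scraping code.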
if __name__ == "__main__":
    cities = ["Menlo Park, CA", ...]
    pages = range(1, 3)
    url = "https://www.facebook.com/careers/jobs/"
    # newline="" is recommended by the csv docs to avoid blank lines on Windows
    with open("facebook_job_list.csv", "w", newline="") as f:
        writer = csv.writer(f)
        writer.writerow(["Website", "Title", "Location", "Job URL"])
        writer.writerows(get_all_jobs(url, cities, pages))
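Note that writer.writerows consumes the generator lazily, so each page's rows are written out as soon as they are scraped, without the full job list ever being held in memory.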