diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..f3276877 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,13 @@ +language: python +python: "2.7" +sudo: false +services: + - rabbitmq + - postgresql +before_script: + - psql -c "CREATE DATABASE travisci;" -U postgres +install: + - pip install -r requirements.txt +script: python manage.py test +env: + - AWS_ENABLED=yes diff --git a/cloudpebble/settings.py b/cloudpebble/settings.py index c53c069e..68864da0 100644 --- a/cloudpebble/settings.py +++ b/cloudpebble/settings.py @@ -1,12 +1,16 @@ # encoding: utf-8 # Django settings for cloudpebble project. +import sys import os import socket import dj_database_url + _environ = os.environ DEBUG = _environ.get('DEBUG', '') != '' +VERBOSE = DEBUG or (_environ.get('VERBOSE', '') != '') +TESTING = 'test' in sys.argv TEMPLATE_DEBUG = DEBUG ADMINS = ( @@ -19,7 +23,18 @@ MANAGERS = ADMINS -if 'DATABASE_URL' not in _environ: +if 'TRAVIS' in _environ: + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.postgresql_psycopg2', + 'NAME': 'travisci', + 'USER': 'postgres', + 'PASSWORD': '', + 'HOST': 'localhost', + 'PORT': '', + } + } +elif 'DATABASE_URL' not in _environ: DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', @@ -231,32 +246,42 @@ 'djangobower', ) -# A sample logging configuration. The only tangible logging -# performed by this configuration is to send an email to -# the site admins on every HTTP 500 error when DEBUG=False. -# See http://docs.djangoproject.com/en/dev/topics/logging for -# more details on how to customize your logging configuration. 
+# This logging config prints: +# INFO logs from django +# INFO or DEBUG logs from 'ide', depending on whether DEBUG=True +# all WARNING logs from any sources LOGGING = { 'version': 1, 'disable_existing_loggers': False, - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse' - } + 'formatters': { + 'verbose': { + 'format': "[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s", + 'datefmt': "%d/%b/%Y %H:%M:%S" + }, }, 'handlers': { - 'mail_admins': { - 'level': 'ERROR', - 'filters': ['require_debug_false'], - 'class': 'django.utils.log.AdminEmailHandler' + 'console': { + 'level': 'DEBUG', + 'class': 'logging.StreamHandler', + 'formatter': 'verbose' } }, 'loggers': { - 'django.request': { - 'handlers': ['mail_admins'], - 'level': 'ERROR', - 'propagate': True, + 'django': { + 'handlers': ['console'], + 'level': 'INFO', + 'propagate': True }, + 'ide': { + 'handlers': ['console'], + 'level': 'DEBUG' if VERBOSE else 'INFO', + 'propagate': True + }, + '': { + 'handlers': ['console'], + 'level': 'WARNING', + 'propagate': False + } } } @@ -309,6 +334,7 @@ AWS_S3_SOURCE_BUCKET = _environ.get('AWS_S3_SOURCE_BUCKET', 'source.cloudpebble.net') AWS_S3_BUILDS_BUCKET = _environ.get('AWS_S3_BUILDS_BUCKET', 'builds.cloudpebble.net') AWS_S3_EXPORT_BUCKET = _environ.get('AWS_S3_EXPORT_BUCKET', 'export.cloudpebble.net') +AWS_S3_HOST = _environ.get('AWS_S3_HOST', None) AWS_S3_FAKE_S3 = _environ.get('AWS_S3_FAKE_S3', None) TYPOGRAPHY_CSS = _environ.get('TYPOGRAPHY_CSS', None) diff --git a/ide/api/qemu.py b/ide/api/qemu.py index 1eea97af..61a2fa4d 100644 --- a/ide/api/qemu.py +++ b/ide/api/qemu.py @@ -1,19 +1,22 @@ __author__ = 'katharine' import json +import requests +import random +import urlparse +import string +import logging + from django.conf import settings from django.contrib.auth.decorators import login_required -from django.http import HttpResponseNotFound from django.shortcuts import redirect, render from django.views.decorators.http 
import require_POST + from ide.api import json_response, json_failure -import requests -import random -import urlparse -import urllib -import string from utils.redis_helper import redis_client +logger = logging.getLogger(__name__) + @login_required @require_POST @@ -38,14 +41,12 @@ def launch_emulator(request): response.raise_for_status() response = response.json() except (requests.RequestException, ValueError) as e: - print "couldn't fetch old instance: %s" % e - pass + logger.info("couldn't fetch old instance: %s", e) else: if response.get('alive', False): return json_response(qemu_instance) else: - print "old instance is dead." - + logger.info("old instance is dead.") token = _generate_token() servers = set(settings.QEMU_URLS) @@ -74,10 +75,9 @@ def launch_emulator(request): redis_client.set(redis_key, json.dumps(response)) return json_response(response) except requests.HTTPError as e: - print e.response.text + logger.warning("Got HTTP error from QEMU launch. Content:\n%s", e.response.text) except (requests.RequestException, ValueError) as e: - print e - pass + logger.error("Error launching qemu: %s", e) return json_failure("Unable to create emulator instance.") diff --git a/ide/api/ycm.py b/ide/api/ycm.py index 041ea0f7..0d2ba143 100644 --- a/ide/api/ycm.py +++ b/ide/api/ycm.py @@ -1,4 +1,8 @@ import json +import logging +import random + +import requests from django.conf import settings from django.contrib.auth.decorators import login_required from django.shortcuts import get_object_or_404 @@ -8,12 +12,11 @@ from ide.api import json_response, json_failure from ide.models.project import Project -import requests -import random - __author__ = 'katharine' +logger = logging.getLogger(__name__) + @login_required @require_POST @@ -32,7 +35,7 @@ def init_autocomplete(request, project_id): if f.kind == 'png-trans': resource_ids.extend([ '#define RESOURCE_ID_%s_BLACK %d' % (identifier.resource_id, count), - '#define RESOURCE_ID_%s_WHITE %d' % (identifier.resource_id, 
count+1) + '#define RESOURCE_ID_%s_WHITE %d' % (identifier.resource_id, count + 1) ]) count += 2 else: @@ -71,6 +74,6 @@ def _spin_up_server(request): except (requests.RequestException, ValueError): import traceback traceback.print_exc() - print "Server %s failed; trying another." % server + logger.warning("Server %s failed; trying another.", server) # If we get out of here, something went wrong. return json_failure({'success': False, 'error': 'No servers'}) diff --git a/ide/git.py b/ide/git.py index 49e9f10b..d6b180b2 100644 --- a/ide/git.py +++ b/ide/git.py @@ -1,13 +1,17 @@ -from ide.models.user import UserGithub -from django.utils.translation import ugettext as _ - -from github import Github, BadCredentialsException, UnknownObjectException -from github.NamedUser import NamedUser -from django.conf import settings import base64 import json import urllib2 import re +import logging + +from github import Github, BadCredentialsException, UnknownObjectException +from github.NamedUser import NamedUser +from django.utils.translation import ugettext as _ +from django.conf import settings + +from ide.models.user import UserGithub + +logger = logging.getLogger(__name__) def git_auth_check(f): @@ -19,12 +23,13 @@ def g(user, *args, **kwargs): except BadCredentialsException: # Bad credentials; remove the user's auth data. try: - print "Bad credentials; revoking user's github tokens." 
+ logger.warning("Bad credentials; revoking user's github tokens.") github = user.github github.delete() except: pass raise + return g diff --git a/ide/models/files.py b/ide/models/files.py index 2e6d105e..b0ed7c77 100644 --- a/ide/models/files.py +++ b/ide/models/files.py @@ -3,20 +3,23 @@ import traceback import datetime import json +import logging + from django.conf import settings -from django.core.validators import RegexValidator from django.db import models from django.db.models.signals import post_delete from django.dispatch import receiver from django.utils.timezone import now from django.core.validators import RegexValidator from django.utils.translation import ugettext as _ -import utils.s3 as s3 +import utils.s3 as s3 from ide.models.meta import IdeModel __author__ = 'katharine' +logger = logging.getLogger(__name__) + class ResourceFile(IdeModel): project = models.ForeignKey('Project', related_name='resources') @@ -355,7 +358,7 @@ def delete_file(sender, instance, **kwargs): try: s3.delete_file('source', instance.s3_path) except: - traceback.print_exc() + logger.exception("Failed to delete S3 file") else: try: os.unlink(instance.local_filename) diff --git a/ide/tasks/archive.py b/ide/tasks/archive.py index ff039ed0..65a0bcce 100644 --- a/ide/tasks/archive.py +++ b/ide/tasks/archive.py @@ -5,6 +5,8 @@ import uuid import zipfile import json +import logging + from celery import task from django.conf import settings from django.contrib.auth.models import User @@ -20,6 +22,8 @@ __author__ = 'katharine' +logger = logging.getLogger(__name__) + def add_project_to_archive(z, project, prefix=''): source_files = SourceFile.objects.filter(project=project) @@ -221,7 +225,7 @@ def make_valid_filename(zip_entry): raise ValueError("Generic resource filenames cannot contain a tilde (~)") if file_name not in desired_resources: desired_resources[root_file_name] = [] - print "Desired resource: %s" % root_file_name + desired_resources[root_file_name].append(resource) 
file_exists_for_root[root_file_name] = False @@ -233,18 +237,18 @@ def make_valid_filename(zip_entry): try: extracted = z.open("%s%s/%s" % (base_dir, RES_PATH, base_filename)) except KeyError: - print "Failed to open %s" % base_filename + logger.debug("Failed to open %s", base_filename) continue # Now we know the file exists and is in the resource directory - is it the one we want? tags, root_file_name = get_filename_variant(base_filename, tag_map) tags_string = ",".join(str(int(t)) for t in tags) - print "Importing file %s with root %s " % (entry.filename, root_file_name) + logger.debug("Importing file %s with root %s ", entry.filename, root_file_name) if root_file_name in desired_resources: medias = desired_resources[root_file_name] - print "Looking for variants of %s" % root_file_name + logger.debug("Looking for variants of %s", root_file_name) # Because 'kind' and 'is_menu_icons' are properties of ResourceFile in the database, # we just use the first one. @@ -260,7 +264,7 @@ def make_valid_filename(zip_entry): is_menu_icon=is_menu_icon) # But add a resource variant for every file - print "Adding variant %s with tags [%s]" % (root_file_name, tags_string) + logger.debug("Adding variant %s with tags [%s]", root_file_name, tags_string) actual_file_name = resource['file'] resource_variants[actual_file_name] = ResourceVariant.objects.create(resource_file=resources_files[root_file_name], tags=tags_string) resource_variants[actual_file_name].save_file(extracted) diff --git a/ide/tasks/build.py b/ide/tasks/build.py index 964fcf3a..c384b913 100644 --- a/ide/tasks/build.py +++ b/ide/tasks/build.py @@ -2,10 +2,10 @@ import shutil import subprocess import tempfile -import traceback import zipfile import json import resource +import logging from celery import task @@ -23,6 +23,8 @@ __author__ = 'katharine' +logger = logging.getLogger(__name__) + def _set_resource_limits(): resource.setrlimit(resource.RLIMIT_CPU, (120, 120)) # 120 seconds of CPU time @@ -76,7 +78,8 @@ def 
save_debug_info(base_dir, build_result, kind, platform, elf_file): try: debug_info = apptools.addr2lines.create_coalesced_group(path) except: - print traceback.format_exc() + # This will print the traceback + logger.exception("Failed to save debug info.") else: build_result.save_debug_info(debug_info, platform, kind) @@ -186,7 +189,7 @@ def run_compile(build_result): env=environ) except subprocess.CalledProcessError as e: output = e.output - print output + logger.warning("Build command failed with error:\n%s\n", output) success = False except Exception as e: success = False @@ -196,7 +199,7 @@ def run_compile(build_result): temp_file = os.path.join(base_dir, 'build', '%s.pbw' % os.path.basename(base_dir)) if not os.path.exists(temp_file): success = False - print "Success was a lie." + logger.warning("Success was a lie.") finally: build_end_time = now() os.chdir(cwd) @@ -213,7 +216,7 @@ def run_compile(build_result): store_size_info(project, build_result, 'chalk', z) except Exception as e: - print "Couldn't extract filesizes: %s" % e + logger.warning("Couldn't extract filesizes: %s", e) # Try pulling out debug information. 
if project.sdk_version == '2': @@ -227,7 +230,6 @@ def run_compile(build_result): save_debug_info(base_dir, build_result, BuildResult.DEBUG_APP, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-app.elf')) save_debug_info(base_dir, build_result, BuildResult.DEBUG_WORKER, 'chalk', os.path.join(base_dir, 'build', 'chalk/pebble-worker.elf')) - build_result.save_pbw(temp_file) build_result.save_build_log(output) build_result.state = BuildResult.STATE_SUCCEEDED if success else BuildResult.STATE_FAILED @@ -249,8 +251,7 @@ def run_compile(build_result): send_td_event(event_name, data, project=project) except Exception as e: - print "Build failed due to internal error: %s" % e - traceback.print_exc() + logger.exception("Build failed due to internal error: %s", e) build_result.state = BuildResult.STATE_FAILED build_result.finished = now() try: @@ -260,4 +261,4 @@ def run_compile(build_result): build_result.save() finally: # shutil.rmtree(base_dir) - print base_dir \ No newline at end of file + logger.debug("base_dir: %s", base_dir) diff --git a/ide/tasks/git.py b/ide/tasks/git.py index 10f9078d..769aada8 100644 --- a/ide/tasks/git.py +++ b/ide/tasks/git.py @@ -2,11 +2,14 @@ import urllib2 import json import os +import logging + from celery import task from django.conf import settings from django.utils.timezone import now from github.GithubObject import NotSet from github import Github, GithubException, InputGitTreeElement + from ide.git import git_auth_check, get_github from ide.models.build import BuildResult from ide.models.project import Project @@ -18,6 +21,8 @@ __author__ = 'katharine' +logger = logging.getLogger(__name__) + @task(acks_late=True) def do_import_github(project_id, github_user, github_project, github_branch, delete_project=False): @@ -108,13 +113,13 @@ def update_expected_paths(new_path): has_changed = True next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', content=source.get_contents()) - print "New file: %s" % 
repo_path + logger.debug("New file: %s", repo_path) else: sha = next_tree[repo_path]._InputGitTreeElement__sha our_content = source.get_contents() expected_sha = git_sha(our_content) if expected_sha != sha: - print "Updated file: %s" % repo_path + logger.debug("Updated file: %s", repo_path) next_tree[repo_path]._InputGitTreeElement__sha = NotSet next_tree[repo_path]._InputGitTreeElement__content = our_content has_changed = True @@ -129,16 +134,16 @@ def update_expected_paths(new_path): if repo_path in next_tree: content = variant.get_contents() if git_sha(content) != next_tree[repo_path]._InputGitTreeElement__sha: - print "Changed resource: %s" % repo_path + logger.debug("Changed resource: %s", repo_path) has_changed = True blob = repo.create_git_blob(base64.b64encode(content), 'base64') - print "Created blob %s" % blob.sha + logger.debug("Created blob %s", blob.sha) next_tree[repo_path]._InputGitTreeElement__sha = blob.sha else: - print "New resource: %s" % repo_path + logger.debug("New resource: %s", repo_path) has_changed = True blob = repo.create_git_blob(base64.b64encode(variant.get_contents()), 'base64') - print "Created blob %s" % blob.sha + logger.debug("Created blob %s", blob.sha) next_tree[repo_path] = InputGitTreeElement(path=repo_path, mode='100644', type='blob', sha=blob.sha) # Manage deleted files @@ -147,7 +152,7 @@ def update_expected_paths(new_path): continue if path not in expected_paths: del next_tree[path] - print "Deleted file: %s" % path + logger.debug("Deleted file: %s", path) has_changed = True # Compare the resource dicts @@ -166,14 +171,14 @@ def update_expected_paths(new_path): our_res_dict = our_manifest_dict['resources'] if our_res_dict != their_res_dict: - print "Resources mismatch." 
+ logger.debug("Resources mismatch.") has_changed = True # Try removing things that we've deleted, if any to_remove = set(x['file'] for x in their_res_dict['media']) - set(x['file'] for x in our_res_dict['media']) for path in to_remove: repo_path = resource_root + path if repo_path in next_tree: - print "Deleted resource: %s" % repo_path + logger.debug("Deleted resource: %s", repo_path) del next_tree[repo_path] # This one is separate because there's more than just the resource map changing. @@ -193,22 +198,22 @@ def update_expected_paths(new_path): # Commit the new tree. if has_changed: - print "Has changed; committing" + logger.debug("Has changed; committing") # GitHub seems to choke if we pass the raw directory nodes off to it, # so we delete those. for x in next_tree.keys(): if next_tree[x]._InputGitTreeElement__mode == '040000': del next_tree[x] - print "removing subtree node %s" % x + logger.debug("removing subtree node %s", x) - print [x._InputGitTreeElement__mode for x in next_tree.values()] + logger.debug([x._InputGitTreeElement__mode for x in next_tree.values()]) git_tree = repo.create_git_tree(next_tree.values()) - print "Created tree %s" % git_tree.sha + logger.debug("Created tree %s", git_tree.sha) git_commit = repo.create_git_commit(commit_message, git_tree, [commit]) - print "Created commit %s" % git_commit.sha + logger.debug("Created commit %s", git_commit.sha) git_ref = repo.get_git_ref('heads/%s' % (project.github_branch or repo.master_branch)) git_ref.edit(git_commit.sha) - print "Updated ref %s" % git_ref.ref + logger.debug("Updated ref %s", git_ref.ref) project.github_last_commit = git_commit.sha project.github_last_sync = now() project.save() @@ -270,7 +275,7 @@ def github_pull(user, project): for resource in media: path = resource_root + resource['file'] if project_type == 'pebblejs' and resource['name'] in { - 'MONO_FONT_14', 'IMAGE_MENU_ICON', 'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}: + 'MONO_FONT_14', 'IMAGE_MENU_ICON', 
'IMAGE_LOGO_SPLASH', 'IMAGE_TILE_SPLASH'}: continue if path not in paths_notags: raise Exception("Resource %s not found in repo." % path) @@ -315,7 +320,7 @@ def do_github_pull(project_id): def hooked_commit(project_id, target_commit): project = Project.objects.select_related('owner__github').get(pk=project_id) did_something = False - print "Comparing %s versus %s" % (project.github_last_commit, target_commit) + logger.debug("Comparing %s versus %s", project.github_last_commit, target_commit) if project.github_last_commit != target_commit: github_pull(project.owner, project) did_something = True diff --git a/ide/tests/__init__.py b/ide/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/ide/tests.py b/ide/tests/test_git.py similarity index 71% rename from ide/tests.py rename to ide/tests/test_git.py index faa5ea97..b62c5e1c 100644 --- a/ide/tests.py +++ b/ide/tests/test_git.py @@ -3,7 +3,7 @@ """ from django.test import TestCase -import git +import ide.git class UrlToReposTest(TestCase): @@ -11,7 +11,7 @@ def test_basic_url_to_repo(self): """ Tests that a simple repo url is correctly recognized. """ - username, reponame = git.url_to_repo("https://github.com/pebble/cloudpebble") + username, reponame = ide.git.url_to_repo("https://github.com/pebble/cloudpebble") self.assertEqual("pebble", username) self.assertEqual("cloudpebble", reponame) @@ -19,7 +19,7 @@ def test_strange_url_to_repo(self): """ Tests that a non-standard repo url is correctly recognized. """ - username, reponame = git.url_to_repo("git://github.com:foo/bar.git") + username, reponame = ide.git.url_to_repo("git://github.com:foo/bar.git") self.assertEqual("foo", username) self.assertEqual("bar", reponame) @@ -27,4 +27,4 @@ def test_bad_url_to_repo(self): """ Tests that a entirely different url returns None. 
""" - self.assertEqual(None, git.url_to_repo("http://www.cuteoverload.com")) + self.assertEqual(None, ide.git.url_to_repo("http://www.cuteoverload.com")) diff --git a/ide/tests/test_source_api.py b/ide/tests/test_source_api.py new file mode 100644 index 00000000..24b5579e --- /dev/null +++ b/ide/tests/test_source_api.py @@ -0,0 +1,133 @@ +import json + +import mock +from django.core.urlresolvers import reverse +from django.core.exceptions import ValidationError +from django.utils.datastructures import MultiValueDictKeyError +from ide.utils.cloudpebble_test import CloudpebbleTestCase +from utils.fakes import FakeS3 + +__author__ = 'joe' + +fake_s3 = FakeS3() + + +@mock.patch('ide.models.files.s3', fake_s3) +class TestSource(CloudpebbleTestCase): + """Tests for the source file APIs""" + + def setUp(self): + self.login() + + def create_file(self, name='file.c', content=None, target=None, success=True): + """ Create a source file """ + url = reverse('ide:create_source_file', args=[self.project_id]) + data = {} + if name is not None: + data['name'] = name + if content is not None: + data['content'] = content + if target is not None: + data['target'] = target + result = json.loads(self.client.post(url, data).content) + self.assertEqual(result['success'], success) + if success: + self.assertEqual(result['file']['name'], name) + self.assertEqual(result['file']['target'], target if target else 'app') + return result['file'] + + def load_file(self, id, success=True): + """ Load a source file's content """ + url = reverse('ide:load_source_file', args=[self.project_id, id]) + result = json.loads(self.client.get(url).content) + self.assertEqual(result['success'], success) + return result + + def rename_file(self, id, modified, old_name=None, new_name=None, success=True): + """ Rename a source file """ + url = reverse('ide:rename_source_file', args=[self.project_id, id]) + data = {} + if old_name is not None: + data['old_name'] = old_name + if new_name is not None: + 
data['new_name'] = new_name + if modified is not None: + data['modified'] = modified + result = json.loads(self.client.post(url, data).content) + self.assertEqual(result['success'], success) + return result + + def save_file(self, id, modified, content=None, folded_lines='[]', success=True): + """ Save new content to a source file """ + data = {} + if content is not None: + data['content'] = content + if folded_lines is not None: + data['folded_lines'] = folded_lines + if modified is not None: + data['modified'] = modified + url = reverse('ide:save_source_file', args=[self.project_id, id]) + result = json.loads(self.client.post(url, data).content) + self.assertEqual(result['success'], success) + + return result + + def get_source_names(self): + """ Get a list of project source file names """ + project = json.loads(self.client.get(reverse('ide:project_info', args=[self.project_id])).content) + return {x['name'] for x in project['source_files']} + + def test_create(self): + """ Test creating files in various valid states """ + self.create_file("c_file.c") + self.create_file("js_file.js") + self.create_file("with_content.c", content="blah" * 100) + self.create_file("without_content.c", content=None) + self.create_file("worker.c", target='worker') + + def test_create_load_save(self): + """ Test a full sequence of creating, loading, saving and re-loading a file""" + content = " Hello world ^^ " + new_content = "New content" + info = self.create_file(content=content) + loaded = self.load_file(info['id']) + self.assertEqual(content, loaded['source']) + self.save_file(info['id'], int(loaded['modified']), content=new_content) + loaded = self.load_file(info['id']) + self.assertEqual(new_content, loaded['source']) + + def test_create_with_invalid_target_throws_error(self): + """ Test that attempting to create a file with an invalid target throws an error """ + with self.assertRaises(ValidationError): + self.create_file(target='invalid') + + def 
test_create_with_invalid_names_throws_error(self): + """ Check that attempts to create files with invalid names throw errors """ + with self.assertRaises(MultiValueDictKeyError): + self.create_file(name=None) + with self.assertRaises(ValidationError): + self.create_file("no_extension") + with self.assertRaises(ValidationError): + self.create_file("bad_extension.html") + with self.assertRaises(ValidationError): + self.create_file(".c") + with self.assertRaises(ValidationError): + self.create_file("`unsafe characters`.c") + + def test_rename(self): + """ Check that files can be renamed """ + name1 = "name1.c" + name2 = "name2.c" + info = self.create_file(name1) + loaded = self.load_file(info['id']) + self.rename_file(info['id'], int(loaded['modified']), name1, name2) + self.assertIn(name2, self.get_source_names()) + + def test_rename_outdated_file_fails(self): + """ Check that a file which was modified externally fails to rename """ + name1 = "name1.c" + name2 = "name2.c" + info = self.create_file(name1) + loaded = self.load_file(info['id']) + self.rename_file(info['id'], int(loaded['modified'] - 5000), name1, name2, success=False) + self.assertIn(name1, self.get_source_names()) diff --git a/ide/utils/cloudpebble_test.py b/ide/utils/cloudpebble_test.py new file mode 100644 index 00000000..bb502daa --- /dev/null +++ b/ide/utils/cloudpebble_test.py @@ -0,0 +1,30 @@ +import json + +from django.test import TestCase +from django.test.client import Client +from django.test.utils import setup_test_environment +try: + from django.test import override_settings +except ImportError: + from django.test.utils import override_settings + +setup_test_environment() + + +# TODO: after moving to Django 1.9, use client.post().json() instead of json.loads(client.post().content) + +class CloudpebbleTestCase(TestCase): + """CloudpebbleTestCase provides convenience functions for other test cases""" + + def login(self, project_options=None): + self.client = Client() + 
self.client.post('/accounts/register', {'username': 'test', 'email': 'test@test.test', 'password1': 'test', 'password2': 'test'}) + login_result = self.client.login(username='test', password='test') + self.assertTrue(login_result) + create_data = {'name': 'test', 'template': 0, 'type': 'native', 'sdk': 3} + if project_options: + create_data.update(project_options) + new_project = json.loads(self.client.post('/ide/project/create', create_data).content) + self.assertTrue(new_project['success']) + self.project_id = new_project['id'] + diff --git a/ide/utils/mailinglist.py b/ide/utils/mailinglist.py index e5ce14d8..63f5a173 100644 --- a/ide/utils/mailinglist.py +++ b/ide/utils/mailinglist.py @@ -3,6 +3,8 @@ from django.conf import settings +logger = logging.getLogger(__name__) + mailchimp_default_list_id = settings.MAILCHIMP_LIST_ID @@ -10,12 +12,12 @@ def add_user(user, mailing_list_id=None): try: mailchimp_api = mailchimp.Mailchimp(apikey=settings.MAILCHIMP_API_KEY) except mailchimp.Error: - logging.error("Missing or invalid MAILCHIMP_API_KEY") + logger.error("Missing or invalid MAILCHIMP_API_KEY") return list_id = mailing_list_id or mailchimp_default_list_id if list_id is None: - logging.error("Missing MAILCHIMP_LIST_ID") + logger.error("Missing MAILCHIMP_LIST_ID") return try: @@ -24,10 +26,10 @@ def add_user(user, mailing_list_id=None): double_optin=False, update_existing=False, replace_interests=False) - logging.debug("{} was successfully subscribed to list {}".format(response['email'], list_id)) + logger.debug("{} was successfully subscribed to list {}".format(response['email'], list_id)) except mailchimp.ListDoesNotExistError: - logging.error("List {} does not exist".format(list_id)) + logger.error("List {} does not exist".format(list_id)) except mailchimp.ListAlreadySubscribedError: - logging.info("User already subscribed to list {}".format(list_id)) + logger.info("User already subscribed to list {}".format(list_id)) except mailchimp.Error as e: - 
logging.error("An error occurred: {} - {}".format(e.__class__, e)) + logger.error("An error occurred: {} - {}".format(e.__class__, e)) diff --git a/ide/utils/project.py b/ide/utils/project.py index 0aa3ddca..82af84d8 100644 --- a/ide/utils/project.py +++ b/ide/utils/project.py @@ -1,30 +1,35 @@ -__author__ = 'katharine' +import logging + from django.utils.translation import ugettext as _ +__author__ = 'katharine' + +logger = logging.getLogger(__name__) + def find_project_root(contents): MANIFEST = 'appinfo.json' SRC_DIR = 'src/' for base_dir in contents: - print base_dir + logger.debug("base_dir: %s", base_dir) try: dir_end = base_dir.index(MANIFEST) - print dir_end + logger.debug("dir_end: %s", dir_end) except ValueError: continue else: if dir_end + len(MANIFEST) != len(base_dir): - print 'failed' + logger.debug('failed') continue base_dir = base_dir[:dir_end] - print base_dir + logger.debug("base_dir: %s", base_dir) for source_dir in contents: if source_dir[:dir_end] != base_dir: continue if not source_dir.endswith('.c') and not source_dir.endswith('.js'): continue - if source_dir[dir_end:dir_end+len(SRC_DIR)] != SRC_DIR: + if source_dir[dir_end:dir_end + len(SRC_DIR)] != SRC_DIR: continue break else: diff --git a/requirements.txt b/requirements.txt index e78404bc..0696f5d7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -17,7 +17,7 @@ psycogreen==1.0 requests==2.7.0 oauth2client==1.3 django-bower==5.0.4 - +mock==2.0.0 # This is used for Mailchimp integration mailchimp==2.0.9 diff --git a/utils/fakes.py b/utils/fakes.py new file mode 100644 index 00000000..2db691db --- /dev/null +++ b/utils/fakes.py @@ -0,0 +1,39 @@ +class FakeRedis(object): + """ Essentially just a dictionary accessed via 'get' and 'set' methods """ + + def __init__(self): + self.storage = {} + self.ex = None + + def set(self, key, value, ex=0): + self.storage[key] = str(value) + self.ex = ex + + def get(self, key, ex=0): + self.ex = ex + return self.storage.get(key, None) + + +class 
FakeS3(object): + """ Essentially just a dictionary where the keys are tuples of (bucket_name, path) """ + def __init__(self): + self.dict = {} + self.last_key = None + + def read_file(self, bucket_name, path): + return self.dict[(bucket_name, path)] + + def save_file(self, bucket_name, path, value, **kwargs): + self.dict[(bucket_name, path)] = value + self.last_key = (bucket_name, path) + + def delete_file(self, bucket_name, path): + del self.dict[(bucket_name, path)] + + def read_file_to_filesystem(self, bucket_name, path, destination): + with open(destination, 'w') as f: + f.write(self.read_file(bucket_name, path)) + + def upload_file(self, bucket_name, path, src_path, **kwargs): + with open(src_path, 'r') as f: + self.save_file(bucket_name, path, f.read()) diff --git a/utils/s3.py b/utils/s3.py index 4294326d..197fbebe 100644 --- a/utils/s3.py +++ b/utils/s3.py @@ -1,8 +1,13 @@ +import logging + import boto from boto.s3.key import Key -from boto.s3.connection import OrdinaryCallingFormat +from boto.s3.connection import OrdinaryCallingFormat, NoHostProvided from django.conf import settings -import urllib + +logger = logging.getLogger(__name__) + + def _ensure_bucket_exists(s3, bucket): try: @@ -10,28 +15,61 @@ def _ensure_bucket_exists(s3, bucket): except boto.exception.S3ResponseError: pass else: - print "Created bucket %s" % bucket - -if settings.AWS_ENABLED: - if settings.AWS_S3_FAKE_S3 is None: - _s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY) - else: - host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2] - port = int(port) - _s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port, - host=host, calling_format=OrdinaryCallingFormat()) - _ensure_bucket_exists(_s3, settings.AWS_S3_SOURCE_BUCKET) - _ensure_bucket_exists(_s3, settings.AWS_S3_EXPORT_BUCKET) - _ensure_bucket_exists(_s3, settings.AWS_S3_BUILDS_BUCKET) - - _buckets = { - 'source': _s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET), - 'export': 
_s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET), - 'builds': _s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET), - } -else: - _s3 = None - _buckets = None + logger.info("Created bucket %s" % bucket) + + + +class BucketHolder(object): + """ The bucket holder configures s3 when it is first accessed. This cannot be done on module import due to quirks in Django's settings system. + See: https://docs.djangoproject.com/en/dev/internals/contributing/writing-code/coding-style/#use-of-django-conf-settings """ + + def __init__(self): + self.buckets = {} + self.configured = False + self.s3 = None + + def configure(self): + if settings.AWS_ENABLED: + if settings.AWS_S3_FAKE_S3 is None: + # The host must be manually specified in Python 2.7.9+ due to + # https://github.com/boto/boto/issues/2836 this bug in boto with .s in + # bucket names. + host = settings.AWS_S3_HOST if settings.AWS_S3_HOST else NoHostProvided + + self.s3 = boto.connect_s3( + settings.AWS_ACCESS_KEY_ID, + settings.AWS_SECRET_ACCESS_KEY, + host=host, + calling_format=OrdinaryCallingFormat() + ) + else: + host, port = (settings.AWS_S3_FAKE_S3.split(':', 2) + [80])[:2] + port = int(port) + self.s3 = boto.connect_s3("key_id", "secret_key", is_secure=False, port=port, + host=host, calling_format=OrdinaryCallingFormat()) + _ensure_bucket_exists(self.s3, settings.AWS_S3_SOURCE_BUCKET) + _ensure_bucket_exists(self.s3, settings.AWS_S3_EXPORT_BUCKET) + _ensure_bucket_exists(self.s3, settings.AWS_S3_BUILDS_BUCKET) + + self.buckets = { + 'source': self.s3.get_bucket(settings.AWS_S3_SOURCE_BUCKET), + 'export': self.s3.get_bucket(settings.AWS_S3_EXPORT_BUCKET), + 'builds': self.s3.get_bucket(settings.AWS_S3_BUILDS_BUCKET), + } + self.configured = True + else: + self.s3 = None + self.buckets = None + + def __getitem__(self, item): + if settings.TESTING: + raise Exception("S3 not mocked in test!") + if not self.configured: + self.configure() + return self.buckets[item] + + +_buckets = BucketHolder() def _requires_aws(fn): @@ -40,6 
+78,7 @@ def _requires_aws(fn): else: def complain(*args, **kwargs): raise Exception("AWS_ENABLED must be True to call %s" % fn.__name__) + return complain @@ -56,12 +95,14 @@ def read_file_to_filesystem(bucket_name, path, destination): key = bucket.get_key(path) key.get_contents_to_filename(destination) + @_requires_aws def delete_file(bucket_name, path): bucket = _buckets[bucket_name] key = bucket.get_key(path) key.delete() + @_requires_aws def save_file(bucket_name, path, value, public=False, content_type='application/octet-stream'): bucket = _buckets[bucket_name] @@ -92,7 +133,7 @@ def upload_file(bucket_name, dest_path, src_path, public=False, content_type='ap } if download_filename is not None: - headers['Content-Disposition'] = 'attachment;filename="%s"' % download_filename.replace(' ','_') + headers['Content-Disposition'] = 'attachment;filename="%s"' % download_filename.replace(' ', '_') key.set_contents_from_filename(src_path, policy=policy, headers=headers)