from django.contrib import admin

from gitadmin.adm.models import *

class RepositoryPermissionInline(admin.TabularInline):
    model = RepositoryPermission

class RepositoryAdmin(admin.ModelAdmin):
    list_display = ('name', 'approved', )
    ordering = ('approved', 'name', )
    inlines = [RepositoryPermissionInline, ]

admin.site.register(Repository, RepositoryAdmin)
admin.site.register(RemoteRepository)
from django import forms
from django.forms import Form, ModelForm

from gitadmin.adm.models import Repository

class RepositoryForm(ModelForm):
    initialclone = forms.RegexField(r'^(git://.+/.+|[^:]+)$', max_length=256, required=False,
                                    label="Initial clone",
#                                   help_text='<a href="javascript:popupRepoList()">Select repository</a>')
                                    help_text='Input a valid local repository name or git:// URL')

    class Meta:
        model = Repository
        exclude = ('repoid', 'name', )
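# A quick sketch of what the initialclone pattern above accepts (values are illustrative):
#   "myproject"                      - plain local repository name (no colon allowed)
#   "git://git.example.org/foo.git"  - anonymous git URL
#   "user@host:foo.git"              - rejected, since a colon only fits the git:// form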
class ConfirmDeleteForm(Form):
    confirmed = forms.BooleanField(required=True, label="Confirm deleting the repository")
import datetime

from django.db import models

PERMISSION_CHOICES = (
    (0, 'Read'),
    (1, 'Write'),
    (2, 'Owner'),
)

class RemoteRepositoryType(models.Model):
    repotype = models.CharField(max_length=16, blank=False, null=False, primary_key=True)

    def __str__(self):
        return self.repotype

    class Meta:
        db_table = 'remoterepositorytypes'

class RemoteRepository(models.Model):
    repotype = models.ForeignKey(RemoteRepositoryType, null=False)
    remoteurl = models.CharField(max_length=256, blank=False)  # rsync or cvs
    remotemodule = models.CharField(max_length=32, blank=False)
    lastsynced = models.DateTimeField(null=False, default=datetime.datetime.now)

    def __str__(self):
        return self.remoteurl

    class Meta:
        db_table = 'remoterepositories'
        verbose_name_plural = 'remote repositories'

class Repository(models.Model):
    repoid = models.AutoField(blank=False, primary_key=True)
    name = models.CharField(max_length=64, blank=False, unique=True)
    description = models.TextField(max_length=1024, blank=False)
    anonymous = models.BooleanField(blank=False, default=False, verbose_name='Enable anonymous access')
    web = models.BooleanField(blank=False, default=False, verbose_name='Enable gitweb access')
    approved = models.BooleanField(blank=False, default=False)
    tabwidth = models.IntegerField(default=8, null=False)
    initialclone = models.CharField(max_length=256, blank=True, null=True)
    remoterepository = models.ForeignKey(RemoteRepository, null=True, blank=True,
                                         verbose_name='Remote repository')

    def ValidateOwnerPermissions(self, user):
        if self.repositorypermission_set.filter(userid=user.username, level=2).count() != 1:
            raise Exception('You need owner permissions to do that!')

    def __str__(self):
        return self.name

    class Meta:
        db_table = 'repositories'
        verbose_name_plural = 'repositories'

class RepositoryPermission(models.Model):
    repository = models.ForeignKey(Repository, db_column='repository')
    userid = models.CharField(max_length=255, blank=False)
    level = models.IntegerField(default=0, verbose_name='Permission', choices=PERMISSION_CHOICES)

    @property
    def write(self):
        return (self.level > 0)

    @property
    def owner(self):
        return (self.level > 1)

    def __str__(self):
        return "%s (%s)" % (self.userid, self.__permstr())

    def __permstr(self):
        if self.level == 2:
            return "Owner"
        elif self.level == 1:
            return "Write"
        return "Read"

    class Meta:
        db_table = 'repository_permissions'
        unique_together = (('repository', 'userid'),)

class GitUser(models.Model):
    userid = models.CharField(max_length=255, blank=False, primary_key=True)
    sshkey = models.CharField(max_length=10240, blank=True)

    class Meta:
        db_table = 'git_users'
import datetime
import re

from django.contrib.auth.decorators import login_required
from django.db import transaction
from django.forms.models import inlineformset_factory
from django.http import HttpResponseRedirect
from django.shortcuts import render, get_object_or_404

from gitadmin.adm.models import Repository, RepositoryPermission, GitUser

# Utility classes
class FormIsNotValid(Exception):
    pass
# Utility functions
def _MissingSshkey(user):
    if not user.is_authenticated():
        return False
    try:
        gu = GitUser.objects.get(userid=user.username)
        if gu.sshkey != '':
            return False
        else:
            return True
    except:
        return True
def context_add(request):
    return {
        'missing_sshkey': _MissingSshkey(request.user),
    }
# Views
@login_required
def index(request):
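    # List the user's own (non-mirrored) repositories. The raw SQL below restricts the
    # queryset to repositories the user has a permission entry for, and the extra 'perm'
    # column is true when that entry is owner level (level > 1).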
    repos = Repository.objects.extra(
        where=["remoterepository_id IS NULL AND repoid IN (SELECT repository FROM repository_permissions WHERE userid=%s)"],
        select={'perm': "SELECT CASE WHEN level>1 THEN 't'::boolean ELSE 'f'::boolean END FROM repository_permissions WHERE userid=%s AND repository_permissions.repository=repositories.repoid"},
        params=[request.user.username], select_params=[request.user.username]).order_by('name')
    return render(request, 'index.html', {
        'repos': repos,
    })
def help(request):
    return render(request, 'help.html')
@login_required
@transaction.atomic
def editrepo(request, repoid):
    repo = get_object_or_404(Repository, repoid=repoid)
    repo.ValidateOwnerPermissions(request.user)
    savedat = None
    form = None

    formfactory = inlineformset_factory(Repository, RepositoryPermission, extra=1, fields=['userid', 'level'])

    if request.method == "POST":
        form = RepositoryForm(data=request.POST, instance=repo)
        formset = formfactory(data=request.POST, instance=repo)
        del form.fields['approved']
        if repo.approved:
            del form.fields['initialclone']
            del form.fields['remoterepository']

        if form.is_valid() and formset.is_valid():
            try:
                # Manually validate the repository entered if there is one to clone
                if form.cleaned_data.has_key('initialclone') and form.cleaned_data['initialclone']:
                    if form.cleaned_data['initialclone'].startswith('git://'):
                        # Validate hostnames and stuff?
                        pass
                    else:
                        # Assume local
                        try:
                            r = Repository.objects.get(name=form.cleaned_data['initialclone'])
                            if not r.anonymous:
                                form._errors['initialclone'] = form._errors.get('initialclone', [])
                                form._errors['initialclone'].append('Specified repository is not available anonymously')
                                raise FormIsNotValid()
                        except Repository.DoesNotExist:
                            form._errors['initialclone'] = form._errors.get('initialclone', [])
                            form._errors['initialclone'].append('Specified repository does not exist')
                            raise FormIsNotValid()

                form.save()
                formset.save()
                savedat = datetime.datetime.now()
                # Get a new copy of the repository to make sure it refreshes!
                repo = get_object_or_404(Repository, repoid=repoid)
            except FormIsNotValid:
                # Just continue as if the form wasn't valid, expect the caller
                # to have set the required error fields
                pass

    if not form or not form.errors:
        form = RepositoryForm(instance=repo)
        del form.fields['approved']
        if repo.approved:
            del form.fields['initialclone']
            del form.fields['remoterepository']
        formset = formfactory(instance=repo)

    perm = repo.repositorypermission_set.all()

    return render(request, 'repoview.html', {
        'form': form,
        'formset': formset,
        'repo': repo,
        'repoperm': perm,
        'form_saved_at': savedat,
    })
@login_required
@transaction.atomic
def deleterepo(request, repoid):
    repo = get_object_or_404(Repository, repoid=repoid)
    repo.ValidateOwnerPermissions(request.user)

    if request.method == 'POST':
        form = ConfirmDeleteForm(data=request.POST)

        if form.is_valid():
            repo.delete()
            return HttpResponseRedirect('../../../')
    else:
        form = ConfirmDeleteForm()

    return render(request, 'deleterepo.html', {
        'form': form,
    })
@login_required
@transaction.atomic
def newrepo(request):
    if request.method != "POST":
        raise Exception("Must be posted!")
    newname = request.POST['reponame']
    r = re.compile(r'^[a-z0-9-_/]{5,64}$')
    if not r.match(newname):
        raise Exception("Format of project name is invalid!")

    repo = Repository(name=newname)
    repo.save()
    perm = RepositoryPermission(userid=request.user.username, repository=repo, level=2)
    perm.save()

    return HttpResponseRedirect('../repo/%s/' % repo.repoid)
import base64
import time
import urlparse
from urllib import quote_plus

from Crypto.Cipher import AES
from django.conf import settings
from django.contrib.auth import login as django_login, logout as django_logout
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from django.http import HttpResponseRedirect
class AuthBackend(ModelBackend):
    # We declare a fake backend that always fails direct authentication -
    # since we should never be using direct authentication in the first place!
    def authenticate(self, username=None, password=None):
        raise Exception("Direct authentication not supported")
####
# Handle login requests by sending them off to the main site
def login(request):
    if request.GET.has_key('next'):
        return HttpResponseRedirect("%s?su=%s" % (
            settings.PGAUTH_REDIRECT,
            quote_plus(request.GET['next']),
        ))
    else:
        return HttpResponseRedirect(settings.PGAUTH_REDIRECT)
# Handle logout requests by logging out of this site and then
# redirecting to log out from the main site as well.
def logout(request):
    if request.user.is_authenticated():
        django_logout(request)
    return HttpResponseRedirect("%slogout/" % settings.PGAUTH_REDIRECT)
# Receive an authentication response from the main website and try
# to log the user in.
def auth_receive(request):
    if request.GET.has_key('s') and request.GET['s'] == "logout":
        # This was a logout request
        return HttpResponseRedirect('/')

    if not request.GET.has_key('i'):
        raise Exception("Missing IV")
    if not request.GET.has_key('d'):
        raise Exception("Missing data!")

    # Set up an AES object and decrypt the data we received
    decryptor = AES.new(base64.b64decode(settings.PGAUTH_KEY),
                        AES.MODE_CBC,
                        base64.b64decode(str(request.GET['i']), "-_"))
    s = decryptor.decrypt(base64.b64decode(str(request.GET['d']), "-_")).rstrip(' ')

    # Now un-urlencode it
    try:
        data = urlparse.parse_qs(s, strict_parsing=True)
    except ValueError, e:
        raise Exception("Invalid encrypted data received.")
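    # At this point 'data' is a parsed querystring. An illustrative payload would look like
    # {'u': ['someuser'], 'f': ['First'], 'l': ['Last'], 'e': ['user@example.org'],
    #  't': ['1269000000'], 'su': ['/']} - username, first/last name, email, a timestamp
    # and an optional redirect target.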
    # Check the timestamp in the authentication
    if (int(data['t'][0]) < time.time() - 10):
        raise Exception("Authentication token too old.")

    # Update the user record (if any)
    try:
        user = User.objects.get(username=data['u'][0])
        # User found, let's see if any important fields have changed
        changed = False
        if user.first_name != data['f'][0]:
            user.first_name = data['f'][0]
            changed = True
        if user.last_name != data['l'][0]:
            user.last_name = data['l'][0]
            changed = True
        if user.email != data['e'][0]:
            user.email = data['e'][0]
            changed = True
        if changed:
            user.save()
    except User.DoesNotExist, e:
        # User not found, create it!
        user = User(username=data['u'][0],
                    first_name=data['f'][0],
                    last_name=data['l'][0],
                    email=data['e'][0],
                    password='setbypluginnotasha1',
                    )
        user.save()

    # Ok, we have a proper user record. Now tell django that
    # we're authenticated so it persists it in the session. Before
    # we do that, we have to annotate it with the backend information.
    user.backend = "%s.%s" % (AuthBackend.__module__, AuthBackend.__name__)
    django_login(request, user)

    # Finally, redirect the user
    if data.has_key('su'):
        return HttpResponseRedirect(data['su'][0])
    # No redirect specified, see if we have it in our settings
    if hasattr(settings, 'PGAUTH_REDIRECT_SUCCESS'):
        return HttpResponseRedirect(settings.PGAUTH_REDIRECT_SUCCESS)
    raise Exception("Authentication successful, but don't know where to redirect!")
import datetime
class AllReposFeed(Feed):
    title = "pggit - all repositories"
    link = "/"
    description = "All pggit repositories, including those not approved yet"
    description_template = "feeds/all.tmpl"

    def items(self):
        return Repository.objects.all().order_by('repoid')

    def item_link(self, repo):
        return "https://git.postgresql.org/gitweb/%s" % repo
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'pggit',
        'USER': 'pggit',
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'gitadmin.urls'
TEMPLATES = [{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
        'loaders': [
            'django.template.loaders.filesystem.Loader',
            'django.template.loaders.app_directories.Loader',
        ],
    },
}]
INSTALLED_APPS = (
)
AUTHENTICATION_BACKENDS = (
    'gitadmin.auth.AuthBackend',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.contrib.messages.context_processors.messages",
    "django.core.context_processors.media",
    'gitadmin.adm.views.context_add',
)
LOGIN_URL = '/adm/login'
# If there is a local_settings.py, let it override our settings
try:
    from local_settings import *
except:
    pass
import filecmp
import os
import shutil
import urllib

import ConfigParser
import psycopg2

from util.LockFile import LockFile
class TabRemover(object):
    """
    Trivial class that removes leading tabs from each row of a file
    being read.
    """
    def __init__(self, filename):
        self.f = open(filename)

    def readline(self):
        return self.f.readline().lstrip("\t")
class AuthorizedKeysDumper(object):
    def __init__(self, db, conf):
        self.db = db
        self.conf = conf

    def dump(self):
        self.dumpkeys()
        self.dumprepos()

    def dumpkeys(self):
        # FIXME: use a trigger to indicate if *anything at all* has changed
        curs = self.db.cursor()
        curs.execute("SELECT userid,sshkey FROM git_users ORDER BY userid")
        f = open("%s/.ssh/authorized_keys.tmp" % self.conf.get("paths", "githome"), "w")
        for userid, sshkey in curs:
            for key in sshkey.split("\n"):
                f.write("command=\"%s %s\",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n" % (self.conf.get("paths", "pggit"), userid, key))
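                # Each generated line looks roughly like (user name, path and key below
                # are made-up examples):
                #   command="/home/git/pggit.py someuser",no-port-forwarding,... ssh-rsa AAAA... someuser@host
                # so every key is forced through the pggit wrapper for that user.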
        f.close()
        os.chmod("%s/.ssh/authorized_keys.tmp" % self.conf.get("paths", "githome"), 0600)
        os.rename("%s/.ssh/authorized_keys.tmp" % self.conf.get("paths", "githome"), "%s/.ssh/authorized_keys" % self.conf.get("paths", "githome"))

    def dumprepos(self):
        # FIXME: use a trigger to indicate if *anything at all* has changed
        allrepos = {}
        curs = self.db.cursor()
        curs.execute("""
SELECT name,anonymous,web,description,initialclone,tabwidth,
    COALESCE(
        (SELECT min(first_name || ' ' || last_name) FROM repository_permissions AS rp
            LEFT JOIN auth_user AS au ON au.username=rp.userid
            WHERE rp.level=2 AND rp.repository=r.repoid),''),
    CASE WHEN EXISTS
        (SELECT * FROM remoterepositories WHERE remoterepositories.id=r.remoterepository_id)
        THEN 1 ELSE 0 END
FROM repositories AS r WHERE approved ORDER BY name""")
        f = open("%s.tmp" % self.conf.get("paths", "gitweblist"), "w")
        accessfile = open("%s.tmp" % self.conf.get("paths", "lighttpdconf"), "w")
        accessfile.write("alias.url += (\n")

        for name, anon, web, description, initialclone, tabwidth, owner, remoterepo in curs:
            allrepos[name] = 1
            repopath = "%s/repos/%s.git" % (self.conf.get("paths", "githome"), name)

            # If this is a remote repository, don't try to create it if it's not there -
            # this is handled by the repository importer.
            if remoterepo and not os.path.isdir(repopath):
                continue

            # Check if this repository exists at all
            if not os.path.isdir(repopath):
                # Does not exist, let's initialize a new one

                # Does the parent directory exist? Needed for things like /user/foo/repo.git
                parentpath = os.path.normpath(os.path.join(repopath, os.pardir))
                if not os.path.isdir(parentpath):
                    # Parent does not exist, create it
                    os.makedirs(parentpath)

                os.environ['GIT_DIR'] = repopath
                if initialclone:
                    print "Initializing git into %s (cloned repo %s)" % (name, initialclone)
                    if initialclone.startswith('git://'):
                        # Just use the raw URL, expect approver to have validated it
                        oldrepo = initialclone
                    else:
                        # This is a local reference, so rewrite it based on our root
                        oldrepo = "%s/repos/%s.git" % (self.conf.get("paths", "githome"), initialclone)
                    os.system("git clone --bare %s %s/repos/%s.git" % (
                        # Old repo
                        oldrepo,
                        # New repo
                        self.conf.get("paths", "githome"), name,
                    ))
                else:
                    print "Initializing new git repository %s" % name
                    os.system("git init --bare --shared")
                del os.environ['GIT_DIR']

            # Check for publishing options here
            if web:
                f.write("%s.git %s\n" % (urllib.quote_plus(name), urllib.quote_plus(owner)))
                df = open("%s/description" % repopath, "w")
                df.write(description)
                df.close()
                # Check if we need to change the tab width (default is 8)
                repoconf = ConfigParser.ConfigParser()
                repoconf.readfp(TabRemover("%s/config" % repopath))
                tabwidth_mod = False
                if repoconf.has_option('gitweb', 'tabwidth'):
                    if tabwidth != int(repoconf.get('gitweb', 'tabwidth')):
                        tabwidth_mod = True
                else:
                    # Not specified, so it's 8...
                    if tabwidth != 8:
                        tabwidth_mod = True
                if tabwidth_mod:
                    if not repoconf.has_section('gitweb'):
                        repoconf.add_section('gitweb')
                    repoconf.set('gitweb', 'tabwidth', tabwidth)
                    cf = open("%s/config" % repopath, "wb")
                    repoconf.write(cf)
                    cf.close()

            anonfile = "%s/git-daemon-export-ok" % repopath
            if anon:
                if not os.path.isfile(anonfile):
                    open(anonfile, "w").close()
                # When anonymous access is allowed, create an entry so
                # we can access it with http git.
                accessfile.write(' "/git/%s.git/" => "%s/",' % (name, repopath))
                accessfile.write("\n")
            else:
                if os.path.isfile(anonfile):
                    os.remove(anonfile)

        f.close()
        os.chmod("%s.tmp" % self.conf.get("paths", "gitweblist"), 0644)
        os.rename("%s.tmp" % self.conf.get("paths", "gitweblist"), self.conf.get("paths", "gitweblist"))

        accessfile.write(")\n")
        accessfile.close()
        # Only rewrite the access file if it is actually different. And if
        # it is, we need to also reload lighttpd at this point.
        if os.path.isfile(self.conf.get("paths", "lighttpdconf")) and filecmp.cmp(
                self.conf.get("paths", "lighttpdconf"),
                "%s.tmp" % self.conf.get("paths", "lighttpdconf")):
            # No changes, so just get rid of the temp file
            os.remove("%s.tmp" % self.conf.get("paths", "lighttpdconf"))
        else:
            # File changed, so we need to overwrite the old one *and*
            # reload lighttpd so the changes take effect.
            os.rename("%s.tmp" % self.conf.get("paths", "lighttpdconf"),
                      self.conf.get("paths", "lighttpdconf"))
            os.system(self.conf.get("webserver", "reloadcommand"))

        # Now remove any repositories that have been deleted
        self._removerepos("%s/repos/" % self.conf.get("paths", "githome"), '/', allrepos)

    def _removerepos(self, rootpath, relativepath, allrepos):
        dl = os.listdir(rootpath)
        if not dl:
            # Nothing in there, perhaps we need to remove it?
            if relativepath != '/':
                print "Removing container directory %s" % rootpath
                try:
                    os.rmdir("%s" % rootpath)
                except Exception, e:
                    print "FAIL: unable to remove container directory: %s" % e
            return

        for d in dl:
            if d.startswith('.'):
                continue
            if not d.endswith('.git'):
                # If it doesn't end in '.git', that means it's a repository container
                # and not actually a repository. So we have to recurse.
                self._removerepos(os.path.join(rootpath, d),
                                  os.path.join(relativepath, d),
                                  allrepos)
            else:
                # Ends with '.git', meaning it's a repository. Let's figure out if it should
                # be here.
                d = d[:-4]
                if not allrepos.has_key(os.path.join(relativepath, d)[1:]):
                    print "Removing repository %s" % os.path.join(relativepath, d)
                    try:
                        shutil.rmtree("%s.git" % os.path.join(rootpath, d))
                    except Exception, e:
                        print "FAIL: unable to remove directory: %s" % e
if __name__ == "__main__":
    c = ConfigParser.ConfigParser()
    c.read("pggit.settings")
    lock = LockFile("%s/repos/.gitdump_interlock" % c.get("paths", "githome"))
    db = psycopg2.connect(c.get('database','db'))
    AuthorizedKeysDumper(db, c).dump()
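# The scripts here read an INI-style "pggit.settings" file. An illustrative (made-up)
# configuration covering some of the options read by these scripts would be:
#
#   [database]
#   db=dbname=pggit user=pggit
#
#   [paths]
#   githome=/home/git
#   pggit=/home/git/pggit.py
#   gitweblist=/home/git/gitweb.list
#   lighttpdconf=/home/git/lighttpd-git.conf
#   logfile=/home/git/pggit.log
#
#   [webserver]
#   reloadcommand=/etc/init.d/lighttpd reload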
import base64
import datetime
import json
import time

import ConfigParser
import psycopg2
import requests
from Crypto.Cipher import AES
class KeySynchronizer(object):
    def __init__(self, db):
        self.db = db

    def sync(self):
        """
        Perform the synchronization.
        """
        curs = self.db.cursor()

        # Record synctime at *start* of sync, in case it takes some time
        synctime = datetime.datetime.now()

        # Fetch last sync date, and see if anything has changed since
        curs.execute("SELECT lastsync-'5 minutes'::interval FROM key_last_sync LIMIT 1")
        lastsync = curs.fetchone()[0]
        r = requests.get("{0}/account/auth/{1}/getkeys/{2}/".format(
            c.get('upstream', 'root'),
            c.get('upstream', 'siteid'),
            int(time.mktime(lastsync.timetuple())),
        ))
        if r.status_code != 200:
            print("API call failed: %s" % r.status_code)
            return
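        # The response body is expected to be "<base64 iv>&<base64 ciphertext>" (urlsafe
        # alphabet), decrypting to a space-padded JSON list of {'u': username, 's': sshkey}
        # entries - an illustrative reading of the handling below, not a formal spec.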
        (ivs, datas) = str(r.text).split('&')
        decryptor = AES.new(base64.b64decode(c.get('upstream', 'key')),
                            AES.MODE_CBC,
                            base64.b64decode(ivs, "-_"))
        s = decryptor.decrypt(base64.b64decode(datas, "-_")).rstrip(' ')
        j = json.loads(s)
        for u in j:
            curs.execute("INSERT INTO git_users (userid, sshkey) VALUES (%(userid)s, %(key)s) ON CONFLICT (userid) DO UPDATE SET sshkey=excluded.sshkey", {
                'userid': u['u'],
                'key': u['s'],
            })

        # Flag our last sync time
        curs.execute("UPDATE key_last_sync SET lastsync=%s", [synctime])

        self.db.commit()
if __name__ == "__main__":
    c = ConfigParser.ConfigParser()
    c.read("pggit.settings")
    db = psycopg2.connect(c.get('database','db'))
    KeySynchronizer(db).sync()
import os
import subprocess
import sys
from datetime import datetime

import ConfigParser
import psycopg2

WRITE_COMMANDS = ('git-receive-pack', )
class Logger(object):
    def __init__(self, cfg):
        self.user = "Unknown"
        self.logfile = cfg.get('paths','logfile')

    def log(self, message):
        f = open(self.logfile, "a")
        f.write("%s: (%s) %s" % (datetime.now(), self.user, message))
        f.write("\n")
        f.close()

    def setuser(self, user):
        if user:
            self.user = user
class InternalException(Exception):
    pass
class PgGit(object):
    user = None
    command = None
    path = None
    subpath = None

    def __init__(self, cfg):
        self.cfg = cfg
        self.logger = Logger(cfg)
        self.repoprefix = "%s/repos/" % cfg.get('paths','githome')
        if cfg.has_option('trigger', 'pushtrigger'):
            pieces = cfg.get('trigger', 'pushtrigger').split('.')
            modname = '.'.join(pieces[:-1])
            classname = pieces[-1]
            try:
                mod = __import__(modname)
                c = getattr(mod, classname)
                self.pushtrigger = c(self.cfg)
            except Exception, e:
                raise InternalException("Failed to load push trigger class: %s" % e)
        else:
            self.pushtrigger = None

    def parse_commandline(self):
        if len(sys.argv) != 2:
            raise InternalException("Can only be run with one commandline argument!")
        self.user = sys.argv[1]
        self.logger.setuser(self.user)

    def parse_command(self):
        env = os.environ.get('SSH_ORIGINAL_COMMAND', None)
        if not env:
            raise InternalException("No SSH_ORIGINAL_COMMAND present!")

        # env contains "git-<command> <argument>" or "git <command> <argument>"
        command, args = env.split(None, 1)
        if command == "git":
            subcommand, args = args.split(None, 1)
            command = "git-%s" % subcommand
        if not command in ALLOWED_COMMANDS:
            raise InternalException("Command '%s' not allowed" % command)

        self.command = command
        if not args.startswith("'/"):
            raise InternalException("Expected git path to start with slash!")

        # FIXME: what about that single quote? Make sure it's there?

        # use os.path.normpath to make sure the user does not attempt to break out of the repository root
        self.path = os.path.normpath(("%s%s" % (self.repoprefix, args[2:].rstrip("'"))))
        if not self.path.startswith(self.repoprefix):
            raise InternalException("Escaping the root directory is of course not permitted")
        if not self.path.endswith('.git'):
            raise InternalException("Git repository paths must end in .git")
        if not os.path.exists(self.path):
            raise InternalException('git repository "%s" does not exist' % args)
        self.subpath = self.path[len(self.repoprefix):-4]
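        # Illustrative walk-through (repository name is made up): with
        # SSH_ORIGINAL_COMMAND = "git-upload-pack '/myproject.git'", command becomes
        # "git-upload-pack", path becomes "<githome>/repos/myproject.git" and
        # subpath becomes "myproject".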

    def check_permissions(self):
        writeperm = False
        db = psycopg2.connect(self.cfg.get('database','db'))
        curs = db.cursor()
        curs.execute("SELECT CASE WHEN remoterepository_id IS NULL THEN level ELSE 0 END FROM repository_permissions INNER JOIN repositories ON repoid=repository WHERE userid=%s AND name=%s",
                     (self.user, self.subpath))
        try:
            writeperm = (curs.fetchone()[0] > 0)
        except:
            raise InternalException("Permission denied on repository for user %s" % self.user)

        if self.command in WRITE_COMMANDS:
            if not writeperm:
                raise InternalException("Write permission denied on repository for user %s" % self.user)

    def run_command(self):
        self.logger.log("Running \"git shell %s %s\"" % (self.command, "'%s'" % self.path))
        subprocess.call(['git', 'shell', '-c', "%s %s" % (self.command, "'%s'" % self.path)])

    def run(self):
        try:
            self.parse_commandline()
            self.parse_command()
            self.check_permissions()
            self.run_command()
            if self.pushtrigger:
                if self.command in WRITE_COMMANDS:
                    self.pushtrigger.pushtrigger(self.subpath, self.user)

        except InternalException, e:
            try:
                self.logger.log(e)
            except Exception, e:
                pass
            sys.stderr.write("%s\n" % e)
            sys.exit(1)
        except Exception, e:
            try:
                self.logger.log(e)
            except Exception, e:
                # If we failed to log, try once more with a new logger, otherwise,
                # just accept that we failed.
                try:
                    Logger().log(e)
                except:
                    pass
            sys.stderr.write("An unhandled exception occurred on the server\n")
            sys.exit(1)
if __name__ == "__main__":
    c = ConfigParser.ConfigParser()
    c.read("%s/pggit.settings" % os.path.abspath(sys.path[0]))
    PgGit(c).run()
import os
import shutil

import ConfigParser
import psycopg2

from util.LockFile import LockFile
class RepoSync(object):
    def __init__(self, db, conf):
        self.db = db
        self.conf = conf

    def sync(self):
        curs = self.db.cursor()
        curs.execute("""
SELECT id,name,repotype_id,remoteurl,remotemodule,lastsynced
FROM repositories
INNER JOIN remoterepositories ON repositories.remoterepository_id=remoterepositories.id
WHERE approved ORDER BY name
""")
        for id, name, repotype, remoteurl, remotemodule, lastsynced in curs:
            if name.find('/') > 0:
                print "Subdirectories not supported when synchronizing"
                continue
            s = SyncMethod.get(repotype)
            s.init(self.conf, name, remoteurl, remotemodule)
            s.sync()
            s.finalize()
            c2 = self.db.cursor()
            c2.execute("UPDATE remoterepositories SET lastsynced=CURRENT_TIMESTAMP WHERE id=%s", (id, ))
            self.db.commit()
class Callable:
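    # Pre-@staticmethod idiom: wrapping a function in Callable lets it be invoked
    # directly on the class (as SyncMethod.get is below) without becoming a bound method.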
    def __init__(self, anycallable):
        self.__call__ = anycallable
class SyncMethod(object):
    def get(repotype):
        if repotype == "cvs":
            return SyncMethodCvs()
        if repotype == "rsynccvs":
            return SyncMethodRsyncCvs()
        if repotype == "git":
            return SyncMethodGit()
        raise Exception("No implementation for repository type %s found" % repotype)
    get = Callable(get)

    def __init__(self):
        self.name = self.remoteurl = self.remotemodule = None

    def init(self, conf, name, remoteurl, remotemodule):
        self.conf = conf
        self.name = name
        self.remoteurl = remoteurl
        self.remotemodule = remotemodule
        self.repopath = "%s/repos/%s.git" % (self.conf.get("paths", "githome"), self.name)

        os.environ['GIT_DIR'] = self.repopath

    def sync(self):
        if not os.path.isdir(self.repopath):
            self.initialsync()
        else:
            self.normalsync()

    def initialsync(self):
        raise NotImplementedError("sync method not implemented")

    def normalsync(self):
        raise NotImplementedError("sync method not implemented")

    def finalize(self):
        savedir = os.getcwd()
        os.chdir(self.repopath)
        self.system("git update-server-info")
        os.chdir(savedir)
        if os.environ.has_key('GIT_DIR'):
            del os.environ['GIT_DIR']

    def system(self, cmd):
        # Version of os.system() that raises an exception if the command
        # fails to run or returns with bad exit code.
        r = os.system(cmd)
        if r != 0:
            raise Exception("Failed to execute \"%s\": %s" % (cmd, r))
        return 0
class SyncMethodCvs(SyncMethod):
    # Synchronize using "git cvsimport", which deals with remote CVS repositories
    # but apparently does not deal with branches very well.
    def initialsync(self):
        # git cvsimport is evil. The first time, we need to let it create
        # a non-bare repository. Otherwise it creates one inside the bare one
        del os.environ['GIT_DIR']
        self.normalsync()
        # Now turn this into a bare repository
        for n in os.listdir("%s/.git" % self.repopath):
            shutil.move(
                "%s/.git/%s" % (self.repopath, n),
                "%s/%s" % (self.repopath, n)
            )
        os.rmdir("%s/.git" % self.repopath)

    def normalsync(self):
        # Not initial sync, so just do a sync
        self.system("git cvsimport -v -d %s -r master -C %s -i -k %s" % (
            # CVS url
            self.remoteurl,
            # New repo
            self.repopath,
            # cvs module
            self.remotemodule,
        ))
class SyncMethodRsyncCvs(SyncMethod):
    # Rsync a cvs repository, and then use fromcvs to convert it to git.
    # This is really only used for the main repository.
    def initialsync(self):
        # We really only use this for the main repo, so way too lazy to set
        # this up now. Do it manually ;-)
        raise NotImplementedError("Sorry, initial sync for rsync-cvs not implemented")

    def normalsync(self):
        rsyncpath = "%s/rsyncsrc/%s" % (self.conf.get("paths", "githome"), self.name)

        # First, rsync the cvs repository
        self.system("rsync -azCH --delete %s %s" % (
            self.remoteurl,
            rsyncpath
        ))

        # If an authormap exists for this repository, copy it over now. The
        # rsync process will remove it again, so we need to redo this after
        # each time we rsync.
        if os.path.isfile("%s/authormap/%s" % (
                self.conf.get("paths", "githome"), self.name)):
            shutil.copyfile(
                "%s/authormap/%s" % (
                    self.conf.get("paths", "githome"), self.name),
                "%s/CVSROOT/authormap" % rsyncpath)

        # Now perform Git Import Magic (TM)
        savedir = os.getcwd()
        os.chdir("%s/sw/fromcvs" % self.conf.get("paths", "githome"))

        # Perform Magic!
        self.system("ruby togit.rb %s %s %s" % (
            rsyncpath,
            self.remotemodule,
            self.repopath,
        ))

        # Repack changes
        os.chdir(self.repopath)
        self.system("git repack -f -d")

        # Restore working dir
        os.chdir(savedir)
class SyncMethodGit(SyncMethod):
    # Sync with a remote git repository.
    def initialsync(self):
        self.system("git clone --no-checkout --bare %s %s" % (
            self.remoteurl,
            self.repopath
        ))

    def normalsync(self):
        savedir = os.getcwd()
        os.chdir(self.repopath)
        del os.environ['GIT_DIR']
        self.system("git fetch -u %s master:master" % self.remoteurl)
        os.chdir(savedir)
if __name__ == "__main__":
    c = ConfigParser.ConfigParser()
    c.read("pggit.settings")
    lock = LockFile("%s/repos/.reposync_interlock" % c.get("paths", "githome"))
    db = psycopg2.connect(c.get('database','db'))
    RepoSync(db, c).sync()
class test(object):
    def __init__(self, cfg):
        pass

    def pushtrigger(self, reponame, username):
        print "Firing push trigger for repository '%s', due to push by %s" % (reponame, username)
import httplib
class varnishpurger(object):
    """
    Push trigger that purges repositories from varnish. The idea being that
    the repositories follow a standard gitweb url structure, and we purge
    all pages related to that repository.

    Requires the config variable "host" set in the section "varnishpurge".
    This can be an IP address (typically 127.0.0.1) or IP address + port
    (typically 127.0.0.1:81). The trigger will always issue a request to the URL
    "/varnish-purge-url", and will include the URL to purge in the form of
    a regular expression in the custom header X-Purge-URL.
    """
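    # Example configuration (values are illustrative):
    #   [varnishpurge]
    #   host=127.0.0.1:81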
    def __init__(self, cfg):
        self.host = cfg.get('varnishpurge', 'host')

    def pushtrigger(self, reponame, username):
        # Make a callback to a local varnish server to purge a repository
        # from it. Assumes gitweb style URLs.
        # Also, purge the actual http serving git repo itself.
        for u in ['^/gitweb/?$',
                  '^/gitweb/\?p=%s.git' % reponame,
                  '^/git/%s' % reponame]:
            if not self._internal_purge(u):
                print "Varnish purge failed, website may become slightly out of date"
                return

    def _internal_purge(self, url):
        try:
            conn = httplib.HTTPConnection(self.host)
            conn.request("GET", "/varnish-purge-url", '', {'X-Purge-URL': url})
            resp = conn.getresponse()
            conn.close()
            if resp.status == 200:
                return True
            return False
        except Exception, ex:
            return False
import os
class LockFile:
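    # Very small interlock helper: refuse to start if the lock file already exists,
    # create it otherwise, and remove it again when the object is garbage collected.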
    def __init__(self, filename):
        self.filename = None
        if os.path.isfile(filename):
            raise Exception("Script is already running (says interlock file %s)" %
                            filename)
        self.filename = filename
        f = open(self.filename, "w")
        f.writelines(('Interlock file', ))
        f.close()
    def __del__(self):
        if self.filename:
            os.remove(self.filename)