DHT: Replace TinyProtobuf with Google Protocol Buffers

The standard Google distribution of Protocol Buffers in Java is better
maintained than TinyProtobuf, and should be faster for most uses.  It
does use slightly more memory due to many of our key types being
stored as strings in protobuf messages, but this is probably worth the
small hit to memory in exchange for better maintained code that is
easier to reuse in other applications.

Exposing all of our data members to the underlying implementation
makes it easier to develop reporting and data mining tools, or to
expand out a nested structure like RefData into a flat format in a SQL
database table.

Since the C++ `protoc` tool is necessary to convert the protobuf
script into Java code, the generated files are committed as part of
the source repository to make it easier for developers who do not have
this tool installed to still build the overall JGit package and make
use of it.  Reviewers will need to be careful to ensure that any edits
made to a *.proto file come in a commit that also updates the
generated code to match.

CQ: 5135
Change-Id: I53e11e82c186b9cf0d7b368e0276519e6a0b2893
Signed-off-by: Shawn O. Pearce <spearce@spearce.org>
Signed-off-by: Chris Aniszczyk <caniszczyk@gmail.com>
This commit is contained in:
Shawn O. Pearce 2011-05-13 07:44:42 -07:00 committed by Chris Aniszczyk
parent 7cad0adc7d
commit 6ec6169215
58 changed files with 12215 additions and 2404 deletions

View File

@@ -120,3 +120,9 @@
license = Apache License, 2.0
use = unmodified source & binary
state = approved
[CQ "5135"]
description = Protocol Buffers Version: 2.4.0a (ATO CQ4876)
license = New BSD license
use = unmodified source & binary
state = approved

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/J2SE-1.5"/>
<classpathentry kind="con" path="org.eclipse.pde.core.requiredPlugins"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@@ -0,0 +1,2 @@
/bin
/target

View File

@@ -0,0 +1,28 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>org.eclipse.jgit.generated.storage.dht.proto</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.ManifestBuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.pde.SchemaBuilder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.pde.PluginNature</nature>
</natures>
</projectDescription>

View File

@@ -0,0 +1,3 @@
#Mon Aug 11 16:46:12 PDT 2008
eclipse.preferences.version=1
encoding/<project>=UTF-8

View File

@@ -0,0 +1,3 @@
#Mon Mar 24 18:55:50 EDT 2008
eclipse.preferences.version=1
line.separator=\n

View File

@@ -0,0 +1,349 @@
#Thu May 05 16:40:25 PDT 2011
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.5
org.eclipse.jdt.core.compiler.codegen.unusedLocal=optimize out
org.eclipse.jdt.core.compiler.compliance=1.5
org.eclipse.jdt.core.compiler.debug.lineNumber=do not generate
org.eclipse.jdt.core.compiler.debug.localVariable=do not generate
org.eclipse.jdt.core.compiler.debug.sourceFile=do not generate
org.eclipse.jdt.core.compiler.doc.comment.support=disabled
org.eclipse.jdt.core.compiler.problem.annotationSuperInterface=ignore
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.autoboxing=ignore
org.eclipse.jdt.core.compiler.problem.comparingIdentical=ignore
org.eclipse.jdt.core.compiler.problem.deadCode=ignore
org.eclipse.jdt.core.compiler.problem.deprecation=ignore
org.eclipse.jdt.core.compiler.problem.deprecationInDeprecatedCode=disabled
org.eclipse.jdt.core.compiler.problem.deprecationWhenOverridingDeprecatedMethod=disabled
org.eclipse.jdt.core.compiler.problem.discouragedReference=ignore
org.eclipse.jdt.core.compiler.problem.emptyStatement=ignore
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.fallthroughCase=ignore
org.eclipse.jdt.core.compiler.problem.fatalOptionalError=disabled
org.eclipse.jdt.core.compiler.problem.fieldHiding=ignore
org.eclipse.jdt.core.compiler.problem.finalParameterBound=ignore
org.eclipse.jdt.core.compiler.problem.finallyBlockNotCompletingNormally=ignore
org.eclipse.jdt.core.compiler.problem.forbiddenReference=ignore
org.eclipse.jdt.core.compiler.problem.hiddenCatchBlock=ignore
org.eclipse.jdt.core.compiler.problem.incompatibleNonInheritedInterfaceMethod=ignore
org.eclipse.jdt.core.compiler.problem.incompleteEnumSwitch=ignore
org.eclipse.jdt.core.compiler.problem.indirectStaticAccess=ignore
org.eclipse.jdt.core.compiler.problem.invalidJavadoc=error
org.eclipse.jdt.core.compiler.problem.invalidJavadocTags=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsDeprecatedRef=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsNotVisibleRef=enabled
org.eclipse.jdt.core.compiler.problem.invalidJavadocTagsVisibility=private
org.eclipse.jdt.core.compiler.problem.localVariableHiding=ignore
org.eclipse.jdt.core.compiler.problem.methodWithConstructorName=ignore
org.eclipse.jdt.core.compiler.problem.missingDeprecatedAnnotation=ignore
org.eclipse.jdt.core.compiler.problem.missingHashCodeMethod=ignore
org.eclipse.jdt.core.compiler.problem.missingJavadocComments=error
org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsOverriding=disabled
org.eclipse.jdt.core.compiler.problem.missingJavadocCommentsVisibility=protected
org.eclipse.jdt.core.compiler.problem.missingJavadocTagDescription=return_tag
org.eclipse.jdt.core.compiler.problem.missingJavadocTags=error
org.eclipse.jdt.core.compiler.problem.missingJavadocTagsOverriding=disabled
org.eclipse.jdt.core.compiler.problem.missingJavadocTagsVisibility=private
org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotation=ignore
org.eclipse.jdt.core.compiler.problem.missingOverrideAnnotationForInterfaceMethodImplementation=enabled
org.eclipse.jdt.core.compiler.problem.missingSerialVersion=ignore
org.eclipse.jdt.core.compiler.problem.missingSynchronizedOnInheritedMethod=ignore
org.eclipse.jdt.core.compiler.problem.noEffectAssignment=ignore
org.eclipse.jdt.core.compiler.problem.noImplicitStringConversion=ignore
org.eclipse.jdt.core.compiler.problem.nonExternalizedStringLiteral=ignore
org.eclipse.jdt.core.compiler.problem.nullReference=ignore
org.eclipse.jdt.core.compiler.problem.overridingPackageDefaultMethod=ignore
org.eclipse.jdt.core.compiler.problem.parameterAssignment=ignore
org.eclipse.jdt.core.compiler.problem.possibleAccidentalBooleanAssignment=ignore
org.eclipse.jdt.core.compiler.problem.potentialNullReference=ignore
org.eclipse.jdt.core.compiler.problem.rawTypeReference=ignore
org.eclipse.jdt.core.compiler.problem.redundantNullCheck=ignore
org.eclipse.jdt.core.compiler.problem.redundantSuperinterface=ignore
org.eclipse.jdt.core.compiler.problem.specialParameterHidingField=disabled
org.eclipse.jdt.core.compiler.problem.staticAccessReceiver=ignore
org.eclipse.jdt.core.compiler.problem.suppressOptionalErrors=disabled
org.eclipse.jdt.core.compiler.problem.suppressWarnings=enabled
org.eclipse.jdt.core.compiler.problem.syntheticAccessEmulation=ignore
org.eclipse.jdt.core.compiler.problem.typeParameterHiding=ignore
org.eclipse.jdt.core.compiler.problem.uncheckedTypeOperation=ignore
org.eclipse.jdt.core.compiler.problem.undocumentedEmptyBlock=ignore
org.eclipse.jdt.core.compiler.problem.unhandledWarningToken=ignore
org.eclipse.jdt.core.compiler.problem.unnecessaryElse=ignore
org.eclipse.jdt.core.compiler.problem.unnecessaryTypeCheck=ignore
org.eclipse.jdt.core.compiler.problem.unqualifiedFieldAccess=ignore
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownException=ignore
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionExemptExceptionAndThrowable=enabled
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionIncludeDocCommentReference=enabled
org.eclipse.jdt.core.compiler.problem.unusedDeclaredThrownExceptionWhenOverriding=disabled
org.eclipse.jdt.core.compiler.problem.unusedImport=ignore
org.eclipse.jdt.core.compiler.problem.unusedLabel=ignore
org.eclipse.jdt.core.compiler.problem.unusedLocal=ignore
org.eclipse.jdt.core.compiler.problem.unusedObjectAllocation=ignore
org.eclipse.jdt.core.compiler.problem.unusedParameter=ignore
org.eclipse.jdt.core.compiler.problem.unusedParameterIncludeDocCommentReference=enabled
org.eclipse.jdt.core.compiler.problem.unusedParameterWhenImplementingAbstract=disabled
org.eclipse.jdt.core.compiler.problem.unusedParameterWhenOverridingConcrete=disabled
org.eclipse.jdt.core.compiler.problem.unusedPrivateMember=ignore
org.eclipse.jdt.core.compiler.problem.unusedWarningToken=ignore
org.eclipse.jdt.core.compiler.problem.varargsArgumentNeedCast=ignore
org.eclipse.jdt.core.compiler.source=1.5
org.eclipse.jdt.core.formatter.align_type_members_on_columns=false
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression=16
org.eclipse.jdt.core.formatter.alignment_for_assignment=0
org.eclipse.jdt.core.formatter.alignment_for_binary_expression=16
org.eclipse.jdt.core.formatter.alignment_for_compact_if=16
org.eclipse.jdt.core.formatter.alignment_for_conditional_expression=80
org.eclipse.jdt.core.formatter.alignment_for_enum_constants=0
org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer=16
org.eclipse.jdt.core.formatter.alignment_for_multiple_fields=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation=16
org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration=16
org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration=16
org.eclipse.jdt.core.formatter.blank_lines_after_imports=1
org.eclipse.jdt.core.formatter.blank_lines_after_package=1
org.eclipse.jdt.core.formatter.blank_lines_before_field=1
org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration=0
org.eclipse.jdt.core.formatter.blank_lines_before_imports=1
org.eclipse.jdt.core.formatter.blank_lines_before_member_type=1
org.eclipse.jdt.core.formatter.blank_lines_before_method=1
org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk=1
org.eclipse.jdt.core.formatter.blank_lines_before_package=0
org.eclipse.jdt.core.formatter.blank_lines_between_import_groups=1
org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations=1
org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_array_initializer=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_block_in_case=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_constant=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_method_declaration=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_switch=end_of_line
org.eclipse.jdt.core.formatter.brace_position_for_type_declaration=end_of_line
org.eclipse.jdt.core.formatter.comment.clear_blank_lines=false
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment=false
org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment=false
org.eclipse.jdt.core.formatter.comment.format_block_comments=true
org.eclipse.jdt.core.formatter.comment.format_comments=true
org.eclipse.jdt.core.formatter.comment.format_header=false
org.eclipse.jdt.core.formatter.comment.format_html=true
org.eclipse.jdt.core.formatter.comment.format_javadoc_comments=true
org.eclipse.jdt.core.formatter.comment.format_line_comments=true
org.eclipse.jdt.core.formatter.comment.format_source_code=true
org.eclipse.jdt.core.formatter.comment.indent_parameter_description=true
org.eclipse.jdt.core.formatter.comment.indent_root_tags=true
org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags=insert
org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter=insert
org.eclipse.jdt.core.formatter.comment.line_length=80
org.eclipse.jdt.core.formatter.compact_else_if=true
org.eclipse.jdt.core.formatter.continuation_indentation=2
org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer=2
org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line=false
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header=true
org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header=true
org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_empty_lines=false
org.eclipse.jdt.core.formatter.indent_statements_compare_to_block=true
org.eclipse.jdt.core.formatter.indent_statements_compare_to_body=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases=true
org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch=false
org.eclipse.jdt.core.formatter.indentation.size=4
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_member=insert
org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement=do not insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body=insert
org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments=insert
org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters=insert
org.eclipse.jdt.core.formatter.insert_space_after_ellipsis=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_after_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter=insert
org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_binary_operator=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_ellipsis=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized=insert
org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return=insert
org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw=insert
org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional=insert
org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for=do not insert
org.eclipse.jdt.core.formatter.insert_space_before_unary_operator=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration=do not insert
org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation=do not insert
org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line=false
org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line=false
org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line=false
org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line=false
org.eclipse.jdt.core.formatter.lineSplit=80
org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column=false
org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column=false
org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body=0
org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve=1
org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line=true
org.eclipse.jdt.core.formatter.tabulation.char=tab
org.eclipse.jdt.core.formatter.tabulation.size=4
org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations=false
org.eclipse.jdt.core.formatter.wrap_before_binary_operator=true

View File

@@ -0,0 +1,62 @@
#Thu Aug 26 12:30:58 CDT 2010
eclipse.preferences.version=1
editor_save_participant_org.eclipse.jdt.ui.postsavelistener.cleanup=true
formatter_profile=_JGit Format
formatter_settings_version=11
org.eclipse.jdt.ui.ignorelowercasenames=true
org.eclipse.jdt.ui.importorder=java;javax;org;com;
org.eclipse.jdt.ui.ondemandthreshold=99
org.eclipse.jdt.ui.staticondemandthreshold=99
org.eclipse.jdt.ui.text.custom_code_templates=<?xml version\="1.0" encoding\="UTF-8" standalone\="no"?><templates/>
sp_cleanup.add_default_serial_version_id=true
sp_cleanup.add_generated_serial_version_id=false
sp_cleanup.add_missing_annotations=false
sp_cleanup.add_missing_deprecated_annotations=true
sp_cleanup.add_missing_methods=false
sp_cleanup.add_missing_nls_tags=false
sp_cleanup.add_missing_override_annotations=true
sp_cleanup.add_missing_override_annotations_interface_methods=false
sp_cleanup.add_serial_version_id=false
sp_cleanup.always_use_blocks=true
sp_cleanup.always_use_parentheses_in_expressions=false
sp_cleanup.always_use_this_for_non_static_field_access=false
sp_cleanup.always_use_this_for_non_static_method_access=false
sp_cleanup.convert_to_enhanced_for_loop=false
sp_cleanup.correct_indentation=false
sp_cleanup.format_source_code=true
sp_cleanup.format_source_code_changes_only=true
sp_cleanup.make_local_variable_final=false
sp_cleanup.make_parameters_final=false
sp_cleanup.make_private_fields_final=true
sp_cleanup.make_type_abstract_if_missing_method=false
sp_cleanup.make_variable_declarations_final=false
sp_cleanup.never_use_blocks=false
sp_cleanup.never_use_parentheses_in_expressions=true
sp_cleanup.on_save_use_additional_actions=true
sp_cleanup.organize_imports=false
sp_cleanup.qualify_static_field_accesses_with_declaring_class=false
sp_cleanup.qualify_static_member_accesses_through_instances_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_through_subtypes_with_declaring_class=true
sp_cleanup.qualify_static_member_accesses_with_declaring_class=false
sp_cleanup.qualify_static_method_accesses_with_declaring_class=false
sp_cleanup.remove_private_constructors=true
sp_cleanup.remove_trailing_whitespaces=true
sp_cleanup.remove_trailing_whitespaces_all=true
sp_cleanup.remove_trailing_whitespaces_ignore_empty=false
sp_cleanup.remove_unnecessary_casts=false
sp_cleanup.remove_unnecessary_nls_tags=false
sp_cleanup.remove_unused_imports=false
sp_cleanup.remove_unused_local_variables=false
sp_cleanup.remove_unused_private_fields=true
sp_cleanup.remove_unused_private_members=false
sp_cleanup.remove_unused_private_methods=true
sp_cleanup.remove_unused_private_types=true
sp_cleanup.sort_members=false
sp_cleanup.sort_members_all=false
sp_cleanup.use_blocks=false
sp_cleanup.use_blocks_only_for_return_and_throw=false
sp_cleanup.use_parentheses_in_expressions=false
sp_cleanup.use_this_for_non_static_field_access=false
sp_cleanup.use_this_for_non_static_field_access_only_if_necessary=true
sp_cleanup.use_this_for_non_static_method_access=false
sp_cleanup.use_this_for_non_static_method_access_only_if_necessary=true

View File

@ -0,0 +1,11 @@
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: %plugin_name
Bundle-SymbolicName: org.eclipse.jgit.generated.storage.dht.proto
Bundle-Version: 1.0.0.qualifier
Bundle-Localization: plugin
Bundle-Vendor: %provider_name
Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: J2SE-1.5
Export-Package: org.eclipse.jgit.generated.storage.dht.proto;version="1.0.0"
Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)"

View File

@ -0,0 +1,5 @@
source.. = src/
output.. = bin/
bin.includes = META-INF/,\
.,\
plugin.properties

View File

@ -0,0 +1,11 @@
#!/bin/sh
#
# Update generated Java code from protocol buffer descriptions.
#
# Requires the C++ `protoc` compiler on $PATH.  Output is written under
# src/ into the package named by the java_package option inside each
# *.proto file.  Run this after any edit to a *.proto file and commit
# the regenerated Java sources together with the proto change.
set -e

for proto in resources/org/eclipse/jgit/storage/dht/*.proto
do
	# Quote "$proto" so paths containing whitespace are passed to
	# protoc as a single argument instead of being word-split.
	echo >&2 "Generating from $proto"
	protoc -Iresources --java_out=src "$proto"
done

View File

@ -0,0 +1,2 @@
plugin_name=JGit DHT Storage Protocol Buffer Messages (Incubation)
provider_name=Eclipse.org

View File

@ -0,0 +1,120 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
Copyright (C) 2011, Google Inc.
and other copyright owners as documented in the project's IP log.
This program and the accompanying materials are made available
under the terms of the Eclipse Distribution License v1.0 which
accompanies this distribution, is reproduced below, and is
available at http://www.eclipse.org/org/documents/edl-v10.php
All rights reserved.
Redistribution and use in source and binary forms, with or
without modification, are permitted provided that the following
conditions are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
- Neither the name of the Eclipse Foundation, Inc. nor the
names of its contributors may be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit-parent</artifactId>
<version>1.0.0-SNAPSHOT</version>
</parent>
<artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
<name>JGit - DHT Storage Protocol Buffer Messages</name>
<description>
Compiled protocol buffer messages for DHT storage
</description>
<properties>
<translate-qualifier/>
</properties>
<dependencies>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/</sourceDirectory>
<resources>
<resource>
<directory>.</directory>
<includes>
<include>plugin.properties</include>
</includes>
</resource>
<resource>
<directory>resources/</directory>
</resource>
</resources>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-source-plugin</artifactId>
<inherited>true</inherited>
<executions>
<execution>
<id>attach-sources</id>
<phase>process-classes</phase>
<goals>
<goal>jar</goal>
</goals>
<configuration>
<archive>
<manifestFile>${source-bundle-manifest}</manifestFile>
</archive>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<configuration>
<archive>
<manifestFile>${bundle-manifest}</manifestFile>
</archive>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@ -0,0 +1,86 @@
// Copyright (C) 2011, Google Inc.
// and other copyright owners as documented in the project's IP log.
//
// This program and the accompanying materials are made available
// under the terms of the Eclipse Distribution License v1.0 which
// accompanies this distribution, is reproduced below, and is
// available at http://www.eclipse.org/org/documents/edl-v10.php
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or
// without modification, are permitted provided that the following
// conditions are met:
//
// - Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// - Neither the name of the Eclipse Foundation, Inc. nor the
// names of its contributors may be used to endorse or promote
// products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
// CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
// INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
// OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
// NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//
// WARNING: If you edit this file, run generate.sh
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
syntax = "proto2";
package org.eclipse.jgit.storage.dht;
option java_generate_equals_and_hash = true;
option java_package = "org.eclipse.jgit.generated.storage.dht.proto";
import "org/eclipse/jgit/storage/dht/git_store.proto";
// Caches ObjectIndexTable in a single message.
//
message CachedObjectIndex {
// One chunk known to contain the object, plus the object's
// position data within that chunk.
message Item {
// Row key of the chunk holding the object.
required string chunk_key = 1;
// Position/type information for the object inside the chunk
// (defined in git_store.proto, imported above).
required ObjectInfo object_info = 2;
// NOTE(review): presumably a timestamp used for cache ordering
// or expiry — confirm against the code that writes this field.
optional fixed64 time = 3;
}
// All known locations of the object; an object can appear in
// more than one chunk.
repeated Item item = 1;
}
// Caches CachedPackInfo in a single message.
//
message CachedPackInfoList {
// One entry per cached pack (CachedPackInfo comes from
// git_store.proto, imported above).
repeated CachedPackInfo pack = 1;
}
// Caches ChunkTable in a single message.
//
// WARNING: Formatters for this message are also hand-coded
// inside of the CacheChunkTable class. If you make changes
// to this message, ensure that class is also updated.
//
message CachedChunk {
// Raw pack-encoded chunk data.
required bytes data = 1;
// Optional precomputed index over the chunk data.
optional bytes index = 2;
// Optional chunk metadata (defined in git_store.proto).
optional ChunkMeta meta = 3;
}

View File

@ -39,8 +39,17 @@
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package git_store; // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
option java_package = "org.eclipse.jgit.storage.dht.proto"; //
// WARNING: If you edit this file, run generate.sh
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
syntax = "proto2";
package org.eclipse.jgit.storage.dht;
option java_generate_equals_and_hash = true;
option java_package = "org.eclipse.jgit.generated.storage.dht.proto";
// Entry in RefTable describing the target of the reference. // Entry in RefTable describing the target of the reference.

View File

@ -12,7 +12,9 @@ Export-Package: org.eclipse.jgit.storage.dht;version="1.0.0",
org.eclipse.jgit.storage.dht.spi.memory;version="1.0.0" org.eclipse.jgit.storage.dht.spi.memory;version="1.0.0"
Bundle-ActivationPolicy: lazy Bundle-ActivationPolicy: lazy
Bundle-RequiredExecutionEnvironment: J2SE-1.5 Bundle-RequiredExecutionEnvironment: J2SE-1.5
Import-Package: org.eclipse.jgit.errors;version="[1.0.0,2.0.0)", Import-Package: com.google.protobuf;version="[2.4.0,2.5.0)",
org.eclipse.jgit.errors;version="[1.0.0,2.0.0)",
org.eclipse.jgit.generated.storage.dht.proto;version="[1.0.0,2.0.0)",
org.eclipse.jgit.lib;version="[1.0.0,2.0.0)", org.eclipse.jgit.lib;version="[1.0.0,2.0.0)",
org.eclipse.jgit.nls;version="[1.0.0,2.0.0)", org.eclipse.jgit.nls;version="[1.0.0,2.0.0)",
org.eclipse.jgit.revwalk;version="[1.0.0,2.0.0)", org.eclipse.jgit.revwalk;version="[1.0.0,2.0.0)",

View File

@ -70,6 +70,12 @@
<artifactId>org.eclipse.jgit</artifactId> <artifactId>org.eclipse.jgit</artifactId>
<version>${project.version}</version> <version>${project.version}</version>
</dependency> </dependency>
<dependency>
<groupId>org.eclipse.jgit</groupId>
<artifactId>org.eclipse.jgit.generated.storage.dht.proto</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies> </dependencies>
<build> <build>

View File

@ -4,9 +4,12 @@ corruptCompressedObject=Corrupt deflate stream in {0} at {1}
cycleInDeltaChain=Cycle in delta chain {0} offset {1} cycleInDeltaChain=Cycle in delta chain {0} offset {1}
databaseRequired=Database is required databaseRequired=Database is required
expectedObjectSizeDuringCopyAsIs=Object {0} has size of 0 expectedObjectSizeDuringCopyAsIs=Object {0} has size of 0
invalidCachedPackInfo=Invalid CachedPackInfo on {0} {1}
invalidChunkKey=Invalid ChunkKey {0} invalidChunkKey=Invalid ChunkKey {0}
invalidChunkMeta=Invalid ChunkMeta on {0}
invalidObjectIndexKey=Invalid ObjectIndexKey {0} invalidObjectIndexKey=Invalid ObjectIndexKey {0}
invalidObjectInfo=Invalid ObjectInfo on {0} invalidObjectInfo=Invalid ObjectInfo for {0} from {1}
invalidRefData=Invalid RefData on {0}
missingChunk=Missing {0} missingChunk=Missing {0}
missingLongOffsetBase=Missing base for offset -{1} in meta of {0} missingLongOffsetBase=Missing base for offset -{1} in meta of {0}
nameRequired=Name or key is required nameRequired=Name or key is required
@ -17,12 +20,6 @@ objectListCountingFrom=Counting objects in {0}
objectTypeUnknown=unknown objectTypeUnknown=unknown
packParserInvalidPointer=Invalid pointer inside pack parser: {0}, chunk {1}, offset {2}. packParserInvalidPointer=Invalid pointer inside pack parser: {0}, chunk {1}, offset {2}.
packParserRollbackFailed=DhtPackParser rollback failed packParserRollbackFailed=DhtPackParser rollback failed
protobufNegativeValuesNotSupported=Negative values are not supported
protobufNoArray=bytes field requires ByteBuffer.hasArray to be true
protobufNotBooleanValue=bool field {0} has invalid value {1}
protobufUnsupportedFieldType=Unsupported protobuf field type {0}
protobufWrongFieldLength=Field {0} should have length of {1}, found {2}
protobufWrongFieldType=Field {0} is of type {1}, expected {2}
recordingObjects=Recording objects recordingObjects=Recording objects
repositoryAlreadyExists=Repository {0} already exists repositoryAlreadyExists=Repository {0} already exists
repositoryMustBeBare=Only bare repositories are supported repositoryMustBeBare=Only bare repositories are supported

View File

@ -1,212 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import org.eclipse.jgit.lib.ObjectId;
/**
* Summary information about a cached pack owned by a repository.
*/
public class CachedPackInfo {
/**
 * Parse info from the storage system.
 *
 * @param raw
 * the raw encoding of the info.
 * @return the info object.
 */
public static CachedPackInfo fromBytes(byte[] raw) {
return fromBytes(TinyProtobuf.decode(raw));
}
/**
 * Parse info from the storage system.
 * <p>
 * Walks the TinyProtobuf message one field tag at a time; tag 0 marks
 * end-of-message.  Unrecognized tags are skipped so newer encodings can
 * still be read by this decoder.
 *
 * @param d
 * decoder for the message buffer.
 * @return the info object.
 */
public static CachedPackInfo fromBytes(TinyProtobuf.Decoder d) {
CachedPackInfo info = new CachedPackInfo();
PARSE: for (;;) {
switch (d.next()) {
case 0:
// End of the outer message.
break PARSE;
case 1:
// Field 1: pack name (hex-encoded ObjectId).
info.name = d.stringObjectId();
continue;
case 2:
// Field 2: pack version (hex-encoded ObjectId).
info.version = d.stringObjectId();
continue;
case 3:
// Field 3: total object count.
info.objectsTotal = d.int64();
continue;
case 4:
// Field 4: count of objects stored in delta form.
info.objectsDelta = d.int64();
continue;
case 5:
// Field 5: total size of the pack in bytes.
info.bytesTotal = d.int64();
continue;
case 6: {
// Field 6: nested message listing tip commit ids; its
// end (tag 0) resumes the outer PARSE loop.
TinyProtobuf.Decoder m = d.message();
for (;;) {
switch (m.next()) {
case 0:
continue PARSE;
case 1:
info.tips.add(m.stringObjectId());
continue;
default:
m.skip();
continue;
}
}
}
case 7: {
// Field 7: nested message listing the chunk keys that
// make up this pack, in pack order.
TinyProtobuf.Decoder m = d.message();
for (;;) {
switch (m.next()) {
case 0:
continue PARSE;
case 1:
info.chunks.add(ChunkKey.fromBytes(m));
continue;
default:
m.skip();
continue;
}
}
}
default:
// Unknown field: skip for forward compatibility.
d.skip();
continue;
}
}
return info;
}
// Serializes info using the same field numbering fromBytes expects.
// The size hints passed to encode() are capacity estimates
// (2 bytes of tag/length overhead per element), not hard limits.
private static byte[] asBytes(CachedPackInfo info) {
int tipSize = (2 + OBJECT_ID_STRING_LENGTH) * info.tips.size();
TinyProtobuf.Encoder tipList = TinyProtobuf.encode(tipSize);
for (ObjectId tip : info.tips)
tipList.string(1, tip);
int chunkSize = (2 + ChunkKey.KEYLEN) * info.chunks.size();
TinyProtobuf.Encoder chunkList = TinyProtobuf.encode(chunkSize);
for (ChunkKey key : info.chunks)
chunkList.bytes(1, key.asBytes());
TinyProtobuf.Encoder e = TinyProtobuf.encode(1024);
e.string(1, info.name);
e.string(2, info.version);
e.int64(3, info.objectsTotal);
// Fields 4 and 5 are omitted from the encoding when zero.
e.int64IfNotZero(4, info.objectsDelta);
e.int64IfNotZero(5, info.bytesTotal);
e.message(6, tipList);
e.message(7, chunkList);
return e.asByteArray();
}
// Name of the pack (field 1 of the encoding).
ObjectId name;
// Version of the pack (field 2 of the encoding).
ObjectId version;
// Tip commits reachable from this pack, kept sorted (field 6).
SortedSet<ObjectId> tips = new TreeSet<ObjectId>();
// Total number of objects in the pack (field 3).
long objectsTotal;
// Number of objects stored as deltas (field 4).
long objectsDelta;
// Total pack size in bytes (field 5).
long bytesTotal;
// Chunks composing the pack, in pack order (field 7).
List<ChunkKey> chunks = new ArrayList<ChunkKey>();
/** @return name of the information object. */
public CachedPackKey getRowKey() {
return new CachedPackKey(name, version);
}
/** @return number of objects stored in the cached pack. */
public long getObjectsTotal() {
return objectsTotal;
}
/** @return number of objects stored in delta format. */
public long getObjectsDelta() {
return objectsDelta;
}
/** @return number of bytes in the cached pack. */
public long getTotalBytes() {
return bytesTotal;
}
/** @return list of all chunks that make up this pack, in order. */
public List<ChunkKey> getChunkKeys() {
return Collections.unmodifiableList(chunks);
}
/**
 * Convert this information into a byte array for storage.
 *
 * @return the data, encoded as a byte array. This does not include the key,
 * callers must store that separately.
 */
public byte[] asBytes() {
return asBytes(this);
}
@Override
public String toString() {
return getRowKey().toString();
}
}

View File

@ -47,6 +47,7 @@
import java.text.MessageFormat; import java.text.MessageFormat;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
/** Unique identifier of a {@link CachedPackInfo} in the DHT. */ /** Unique identifier of a {@link CachedPackInfo} in the DHT. */
@ -61,18 +62,6 @@ public static CachedPackKey fromBytes(byte[] key) {
return fromBytes(key, 0, key.length); return fromBytes(key, 0, key.length);
} }
/**
* @param d
* decoder to read key from current field from.
* @return the key
*/
public static CachedPackKey fromBytes(TinyProtobuf.Decoder d) {
int len = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
return fromBytes(buf, ptr, len);
}
/** /**
* @param key * @param key
* @param ptr * @param ptr
@ -100,6 +89,16 @@ public static CachedPackKey fromString(String key) {
return new CachedPackKey(name, vers); return new CachedPackKey(name, vers);
} }
/**
* @param info
* @return the key
*/
public static CachedPackKey fromInfo(CachedPackInfo info) {
ObjectId name = ObjectId.fromString(info.getName());
ObjectId vers = ObjectId.fromString(info.getVersion());
return new CachedPackKey(name, vers);
}
private final ObjectId name; private final ObjectId name;
private final ObjectId version; private final ObjectId version;

View File

@ -52,10 +52,13 @@
import java.util.Map; import java.util.Map;
import java.util.zip.Deflater; import java.util.zip.Deflater;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.dht.ChunkMeta.BaseChunk;
import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.transport.PackedObjectInfo; import org.eclipse.jgit.transport.PackedObjectInfo;
@ -75,8 +78,6 @@ class ChunkFormatter {
private final byte[] varIntBuf; private final byte[] varIntBuf;
private final ChunkInfo info;
private final int maxObjects; private final int maxObjects;
private Map<ChunkKey, BaseChunkInfo> baseChunks; private Map<ChunkKey, BaseChunkInfo> baseChunks;
@ -95,25 +96,35 @@ class ChunkFormatter {
private PackChunk.Members builder; private PackChunk.Members builder;
private GitStore.ChunkInfo.Source source;
private boolean fragment;
private int objectType;
private int objectsTotal, objectsWhole, objectsRefDelta, objectsOfsDelta;
private ChunkInfo chunkInfo;
ChunkFormatter(RepositoryKey repo, DhtInserterOptions options) { ChunkFormatter(RepositoryKey repo, DhtInserterOptions options) {
this.repo = repo; this.repo = repo;
this.options = options; this.options = options;
this.varIntBuf = new byte[32]; this.varIntBuf = new byte[32];
this.info = new ChunkInfo();
this.chunkData = new byte[options.getChunkSize()]; this.chunkData = new byte[options.getChunkSize()];
this.maxObjects = options.getMaxObjectCount(); this.maxObjects = options.getMaxObjectCount();
this.objectType = -1;
} }
void setSource(ChunkInfo.Source src) { void setSource(GitStore.ChunkInfo.Source src) {
info.source = src; source = src;
} }
void setObjectType(int type) { void setObjectType(int type) {
info.objectType = type; objectType = type;
} }
void setFragment() { void setFragment() {
info.fragment = true; fragment = true;
} }
ChunkKey getChunkKey() { ChunkKey getChunkKey() {
@ -121,7 +132,7 @@ ChunkKey getChunkKey() {
} }
ChunkInfo getChunkInfo() { ChunkInfo getChunkInfo() {
return info; return chunkInfo;
} }
ChunkMeta getChunkMeta() { ChunkMeta getChunkMeta() {
@ -150,37 +161,58 @@ ChunkKey end(MessageDigest md) {
ptr += 4; ptr += 4;
md.update(chunkData, 0, ptr); md.update(chunkData, 0, ptr);
info.chunkKey = ChunkKey.create(repo, ObjectId.fromRaw(md.digest())); ChunkKey key = ChunkKey.create(repo, ObjectId.fromRaw(md.digest()));
info.chunkSize = chunkData.length;
GitStore.ChunkInfo.Builder info = GitStore.ChunkInfo.newBuilder();
info.setSource(source);
info.setObjectType(GitStore.ChunkInfo.ObjectType.valueOf(objectType));
if (fragment)
info.setIsFragment(true);
info.setChunkSize(chunkData.length);
GitStore.ChunkInfo.ObjectCounts.Builder cnts = info.getObjectCountsBuilder();
cnts.setTotal(objectsTotal);
if (objectsWhole > 0)
cnts.setWhole(objectsWhole);
if (objectsRefDelta > 0)
cnts.setRefDelta(objectsRefDelta);
if (objectsOfsDelta > 0)
cnts.setOfsDelta(objectsOfsDelta);
builder = new PackChunk.Members(); builder = new PackChunk.Members();
builder.setChunkKey(info.chunkKey); builder.setChunkKey(key);
builder.setChunkData(chunkData); builder.setChunkData(chunkData);
ChunkMeta meta = new ChunkMeta(info.chunkKey);
if (baseChunks != null) { if (baseChunks != null) {
meta.baseChunks = new ArrayList<BaseChunk>(baseChunks.size()); List<BaseChunk> list = new ArrayList<BaseChunk>(baseChunks.size());
for (BaseChunkInfo b : baseChunks.values()) { for (BaseChunkInfo b : baseChunks.values()) {
if (0 < b.useCount) if (0 < b.useCount) {
meta.baseChunks.add(new BaseChunk(b.relativeStart, b.key)); BaseChunk.Builder c = BaseChunk.newBuilder();
c.setRelativeStart(b.relativeStart);
c.setChunkKey(b.key.asString());
list.add(c.build());
}
} }
Collections.sort(meta.baseChunks, new Comparator<BaseChunk>() { Collections.sort(list, new Comparator<BaseChunk>() {
public int compare(BaseChunk a, BaseChunk b) { public int compare(BaseChunk a, BaseChunk b) {
return Long.signum(a.relativeStart - b.relativeStart); return Long.signum(a.getRelativeStart()
- b.getRelativeStart());
} }
}); });
} ChunkMeta.Builder b = ChunkMeta.newBuilder();
if (!meta.isEmpty()) { b.addAllBaseChunk(list);
ChunkMeta meta = b.build();
builder.setMeta(meta); builder.setMeta(meta);
info.metaSize = meta.asBytes().length; info.setMetaSize(meta.getSerializedSize());
} }
if (objectList != null && !objectList.isEmpty()) { if (objectList != null && !objectList.isEmpty()) {
byte[] index = ChunkIndex.create(objectList); byte[] index = ChunkIndex.create(objectList);
builder.setChunkIndex(index); builder.setChunkIndex(index);
info.indexSize = index.length; info.setIndexSize(index.length);
} }
chunkInfo = new ChunkInfo(key, info.build());
return getChunkKey(); return getChunkKey();
} }
@ -198,7 +230,7 @@ public int compare(BaseChunk a, BaseChunk b) {
void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException { void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
WriteBuffer chunkBuf = db.newWriteBuffer(); WriteBuffer chunkBuf = db.newWriteBuffer();
db.repository().put(repo, info, chunkBuf); db.repository().put(repo, getChunkInfo(), chunkBuf);
chunkBuf.flush(); chunkBuf.flush();
db.chunk().put(builder, chunkBuf); db.chunk().put(builder, chunkBuf);
@ -208,7 +240,7 @@ void safePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
} }
void unsafePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException { void unsafePut(Database db, WriteBuffer dbWriteBuffer) throws DhtException {
db.repository().put(repo, info, dbWriteBuffer); db.repository().put(repo, getChunkInfo(), dbWriteBuffer);
db.chunk().put(builder, dbWriteBuffer); db.chunk().put(builder, dbWriteBuffer);
linkObjects(db, dbWriteBuffer); linkObjects(db, dbWriteBuffer);
} }
@ -225,11 +257,11 @@ private void linkObjects(Database db, WriteBuffer dbWriteBuffer)
boolean whole(Deflater def, int type, byte[] data, int off, final int size, boolean whole(Deflater def, int type, byte[] data, int off, final int size,
ObjectId objId) { ObjectId objId) {
if (free() < 10 || maxObjects <= info.objectsTotal) if (free() < 10 || maxObjects <= objectsTotal)
return false; return false;
header(type, size); header(type, size);
info.objectsWhole++; objectsWhole++;
currentObjectType = type; currentObjectType = type;
int endOfHeader = ptr; int endOfHeader = ptr;
@ -257,20 +289,20 @@ boolean whole(Deflater def, int type, byte[] data, int off, final int size,
final int packedSize = ptr - endOfHeader; final int packedSize = ptr - endOfHeader;
objectList.add(new StoredObject(objId, type, mark, packedSize, size)); objectList.add(new StoredObject(objId, type, mark, packedSize, size));
if (info.objectType < 0) if (objectType < 0)
info.objectType = type; objectType = type;
else if (info.objectType != type) else if (objectType != type)
info.objectType = ChunkInfo.OBJ_MIXED; objectType = ChunkInfo.OBJ_MIXED;
return true; return true;
} }
boolean whole(int type, long inflatedSize) { boolean whole(int type, long inflatedSize) {
if (free() < 10 || maxObjects <= info.objectsTotal) if (free() < 10 || maxObjects <= objectsTotal)
return false; return false;
header(type, inflatedSize); header(type, inflatedSize);
info.objectsWhole++; objectsWhole++;
currentObjectType = type; currentObjectType = type;
return true; return true;
} }
@ -278,11 +310,11 @@ boolean whole(int type, long inflatedSize) {
boolean ofsDelta(long inflatedSize, long negativeOffset) { boolean ofsDelta(long inflatedSize, long negativeOffset) {
final int ofsPtr = encodeVarInt(negativeOffset); final int ofsPtr = encodeVarInt(negativeOffset);
final int ofsLen = varIntBuf.length - ofsPtr; final int ofsLen = varIntBuf.length - ofsPtr;
if (free() < 10 + ofsLen || maxObjects <= info.objectsTotal) if (free() < 10 + ofsLen || maxObjects <= objectsTotal)
return false; return false;
header(Constants.OBJ_OFS_DELTA, inflatedSize); header(Constants.OBJ_OFS_DELTA, inflatedSize);
info.objectsOfsDelta++; objectsOfsDelta++;
currentObjectType = Constants.OBJ_OFS_DELTA; currentObjectType = Constants.OBJ_OFS_DELTA;
currentObjectBase = null; currentObjectBase = null;
@ -294,11 +326,11 @@ boolean ofsDelta(long inflatedSize, long negativeOffset) {
} }
boolean refDelta(long inflatedSize, AnyObjectId baseId) { boolean refDelta(long inflatedSize, AnyObjectId baseId) {
if (free() < 30 || maxObjects <= info.objectsTotal) if (free() < 30 || maxObjects <= objectsTotal)
return false; return false;
header(Constants.OBJ_REF_DELTA, inflatedSize); header(Constants.OBJ_REF_DELTA, inflatedSize);
info.objectsRefDelta++; objectsRefDelta++;
currentObjectType = Constants.OBJ_REF_DELTA; currentObjectType = Constants.OBJ_REF_DELTA;
baseId.copyRawTo(chunkData, ptr); baseId.copyRawTo(chunkData, ptr);
@ -345,7 +377,7 @@ boolean isEmpty() {
} }
int getObjectCount() { int getObjectCount() {
return info.objectsTotal; return objectsTotal;
} }
int position() { int position() {
@ -374,32 +406,32 @@ void rollback() {
} }
void adjustObjectCount(int delta, int type) { void adjustObjectCount(int delta, int type) {
info.objectsTotal += delta; objectsTotal += delta;
switch (type) { switch (type) {
case Constants.OBJ_COMMIT: case Constants.OBJ_COMMIT:
case Constants.OBJ_TREE: case Constants.OBJ_TREE:
case Constants.OBJ_BLOB: case Constants.OBJ_BLOB:
case Constants.OBJ_TAG: case Constants.OBJ_TAG:
info.objectsWhole += delta; objectsWhole += delta;
break; break;
case Constants.OBJ_OFS_DELTA: case Constants.OBJ_OFS_DELTA:
info.objectsOfsDelta += delta; objectsOfsDelta += delta;
if (currentObjectBase != null && --currentObjectBase.useCount == 0) if (currentObjectBase != null && --currentObjectBase.useCount == 0)
baseChunks.remove(currentObjectBase.key); baseChunks.remove(currentObjectBase.key);
currentObjectBase = null; currentObjectBase = null;
break; break;
case Constants.OBJ_REF_DELTA: case Constants.OBJ_REF_DELTA:
info.objectsRefDelta += delta; objectsRefDelta += delta;
break; break;
} }
} }
private void header(int type, long inflatedSize) { private void header(int type, long inflatedSize) {
mark = ptr; mark = ptr;
info.objectsTotal++; objectsTotal++;
long nextLength = inflatedSize >>> 4; long nextLength = inflatedSize >>> 4;
chunkData[ptr++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (inflatedSize & 0x0F)); chunkData[ptr++] = (byte) ((nextLength > 0 ? 0x80 : 0x00) | (type << 4) | (inflatedSize & 0x0F));
@ -454,8 +486,12 @@ private static class StoredObject extends PackedObjectInfo {
} }
ObjectInfo link(ChunkKey key) { ObjectInfo link(ChunkKey key) {
final int ptr = (int) getOffset(); GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
return new ObjectInfo(key, -1, type, ptr, packed, inflated, null, false); b.setObjectType(ObjectType.valueOf(type));
b.setOffset((int) getOffset());
b.setPackedSize(packed);
b.setInflatedSize(inflated);
return new ObjectInfo(key, b.build());
} }
} }
} }

View File

@ -43,150 +43,32 @@
package org.eclipse.jgit.storage.dht; package org.eclipse.jgit.storage.dht;
import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
/** /**
* Summary information about a chunk owned by a repository. * Summary information about a chunk owned by a repository.
*/ */
public class ChunkInfo { public class ChunkInfo {
/** Source the chunk (what code path created it). */
public static enum Source implements TinyProtobuf.Enum {
/** Came in over the network from an external source */
RECEIVE(1),
/** Created in this repository (e.g. a merge). */
INSERT(2),
/** Generated during a repack of this repository. */
REPACK(3);
private final int value;
Source(int val) {
this.value = val;
}
public int value() {
return value;
}
}
/** Mixed objects are stored in the chunk (instead of single type). */ /** Mixed objects are stored in the chunk (instead of single type). */
public static final int OBJ_MIXED = 0; public static final int OBJ_MIXED = 0;
private final ChunkKey chunkKey;
private final GitStore.ChunkInfo data;
/** /**
* Parse info from the storage system. * Wrap a ChunkInfo message.
* *
* @param chunkKey * @param key
* the chunk the link points to. * associated chunk key.
* @param raw * @param data
* the raw encoding of the info. * data.
* @return the info object.
*/ */
public static ChunkInfo fromBytes(ChunkKey chunkKey, byte[] raw) { public ChunkInfo(ChunkKey key, GitStore.ChunkInfo data) {
ChunkInfo info = new ChunkInfo(); this.chunkKey = key;
info.chunkKey = chunkKey; this.data = data;
TinyProtobuf.Decoder d = TinyProtobuf.decode(raw);
PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
info.source = d.intEnum(Source.values());
continue;
case 2:
info.objectType = d.int32();
continue;
case 3:
info.fragment = d.bool();
continue;
case 4:
info.cachedPack = CachedPackKey.fromBytes(d);
continue;
case 5: {
TinyProtobuf.Decoder m = d.message();
for (;;) {
switch (m.next()) {
case 0:
continue PARSE;
case 1:
info.objectsTotal = m.int32();
continue;
case 2:
info.objectsWhole = m.int32();
continue;
case 3:
info.objectsOfsDelta = m.int32();
continue;
case 4:
info.objectsRefDelta = m.int32();
continue;
default:
m.skip();
continue;
}
}
}
case 6:
info.chunkSize = d.int32();
continue;
case 7:
info.indexSize = d.int32();
continue;
case 8:
info.metaSize = d.int32();
continue;
default:
d.skip();
continue;
}
}
return info;
} }
private static byte[] asBytes(ChunkInfo info) {
TinyProtobuf.Encoder objects = TinyProtobuf.encode(48);
objects.int32IfNotZero(1, info.objectsTotal);
objects.int32IfNotZero(2, info.objectsWhole);
objects.int32IfNotZero(3, info.objectsOfsDelta);
objects.int32IfNotZero(4, info.objectsRefDelta);
TinyProtobuf.Encoder e = TinyProtobuf.encode(128);
e.intEnum(1, info.source);
e.int32IfNotNegative(2, info.objectType);
e.boolIfTrue(3, info.fragment);
e.string(4, info.cachedPack);
e.message(5, objects);
e.int32IfNotZero(6, info.chunkSize);
e.int32IfNotZero(7, info.indexSize);
e.int32IfNotZero(8, info.metaSize);
return e.asByteArray();
}
ChunkKey chunkKey;
Source source;
int objectType = -1;
boolean fragment;
CachedPackKey cachedPack;
int objectsTotal;
int objectsWhole;
int objectsOfsDelta;
int objectsRefDelta;
int chunkSize;
int indexSize;
int metaSize;
/** @return the repository that contains the chunk. */ /** @return the repository that contains the chunk. */
public RepositoryKey getRepositoryKey() { public RepositoryKey getRepositoryKey() {
return chunkKey.getRepositoryKey(); return chunkKey.getRepositoryKey();
@ -197,69 +79,9 @@ public ChunkKey getChunkKey() {
return chunkKey; return chunkKey;
} }
/** @return source of this chunk. */ /** @return the underlying message containing all data. */
public Source getSource() { public GitStore.ChunkInfo getData() {
return source; return data;
}
/** @return type of object in the chunk, or {@link #OBJ_MIXED}. */
public int getObjectType() {
return objectType;
}
/** @return true if this chunk is part of a large fragmented object. */
public boolean isFragment() {
return fragment;
}
/** @return cached pack this is a member of, or null. */
public CachedPackKey getCachedPack() {
return cachedPack;
}
/** @return size of the chunk's compressed data, in bytes. */
public int getChunkSizeInBytes() {
return chunkSize;
}
/** @return size of the chunk's index data, in bytes. */
public int getIndexSizeInBytes() {
return indexSize;
}
/** @return size of the chunk's meta data, in bytes. */
public int getMetaSizeInBytes() {
return metaSize;
}
/** @return number of objects stored in the chunk. */
public int getObjectsTotal() {
return objectsTotal;
}
/** @return number of whole objects stored in the chunk. */
public int getObjectsWhole() {
return objectsWhole;
}
/** @return number of OFS_DELTA objects stored in the chunk. */
public int getObjectsOffsetDelta() {
return objectsOfsDelta;
}
/** @return number of REF_DELTA objects stored in the chunk. */
public int getObjectsReferenceDelta() {
return objectsRefDelta;
}
/**
* Convert this link into a byte array for storage.
*
* @return the link data, encoded as a byte array. This does not include the
* ChunkKey, callers must store that separately.
*/
public byte[] asBytes() {
return asBytes(this);
} }
@Override @Override
@ -267,20 +89,8 @@ public String toString() {
StringBuilder b = new StringBuilder(); StringBuilder b = new StringBuilder();
b.append("ChunkInfo:"); b.append("ChunkInfo:");
b.append(chunkKey); b.append(chunkKey);
b.append(" ["); b.append("\n");
if (getSource() != null) b.append(data);
b.append(" ").append(getSource());
if (isFragment())
b.append(" fragment");
if (getObjectType() != 0)
b.append(" ").append(Constants.typeString(getObjectType()));
if (0 < getObjectsTotal())
b.append(" objects=").append(getObjectsTotal());
if (0 < getChunkSizeInBytes())
b.append(" chunk=").append(getChunkSizeInBytes()).append("B");
if (0 < getIndexSizeInBytes())
b.append(" index=").append(getIndexSizeInBytes()).append("B");
b.append(" ]");
return b.toString(); return b.toString();
} }
} }

View File

@ -73,18 +73,6 @@ public static ChunkKey fromBytes(byte[] key) {
return fromBytes(key, 0, key.length); return fromBytes(key, 0, key.length);
} }
/**
* @param d
* decoder to read key from current field from.
* @return the key
*/
public static ChunkKey fromBytes(TinyProtobuf.Decoder d) {
int len = d.bytesLength();
int ptr = d.bytesOffset();
byte[] buf = d.bytesArray();
return fromBytes(buf, ptr, len);
}
/** /**
* @param key * @param key
* @param ptr * @param ptr

View File

@ -1,391 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import java.text.MessageFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/** Metadata stored inline with each PackChunk. */
public class ChunkMeta {
	/**
	 * Convert from byte array.
	 *
	 * @param key
	 *            the chunk key this meta object sits in.
	 * @param raw
	 *            the raw byte array.
	 * @return the chunk meta.
	 */
	public static ChunkMeta fromBytes(ChunkKey key, byte[] raw) {
		return fromBytes(key, TinyProtobuf.decode(raw));
	}

	/**
	 * Convert from byte array.
	 *
	 * @param key
	 *            the chunk key this meta object sits in.
	 * @param d
	 *            the message decoder.
	 * @return the chunk meta.
	 */
	public static ChunkMeta fromBytes(ChunkKey key, TinyProtobuf.Decoder d) {
		List<BaseChunk> baseChunk = null;
		List<ChunkKey> fragment = null;
		PrefetchHint commit = null;
		PrefetchHint tree = null;

		PARSE: for (;;) {
			switch (d.next()) {
			case 0: // end of message
				break PARSE;
			case 1: // repeated base chunk descriptions
				if (baseChunk == null)
					baseChunk = new ArrayList<BaseChunk>(4);
				baseChunk.add(BaseChunk.fromBytes(d.message()));
				continue;
			case 2: // repeated fragment chunk keys
				if (fragment == null)
					fragment = new ArrayList<ChunkKey>(4);
				fragment.add(ChunkKey.fromBytes(d));
				continue;
			case 51: // commit prefetch hint
				commit = PrefetchHint.fromBytes(d.message());
				continue;
			case 52: // tree prefetch hint
				tree = PrefetchHint.fromBytes(d.message());
				continue;
			default: // unknown field; skip for forward compatibility
				d.skip();
				continue;
			}
		}

		return new ChunkMeta(key, baseChunk, fragment, commit, tree);
	}

	private final ChunkKey chunkKey;

	// Mutable members are package-visible; they are filled in while
	// the chunk is being assembled. Any of them may be null.
	List<BaseChunk> baseChunks;

	List<ChunkKey> fragments;

	PrefetchHint commitPrefetch;

	PrefetchHint treePrefetch;

	ChunkMeta(ChunkKey key) {
		this(key, null, null, null, null);
	}

	ChunkMeta(ChunkKey chunkKey, List<BaseChunk> baseChunk,
			List<ChunkKey> fragment, PrefetchHint commit, PrefetchHint tree) {
		this.chunkKey = chunkKey;
		this.baseChunks = baseChunk;
		this.fragments = fragment;
		this.commitPrefetch = commit;
		this.treePrefetch = tree;
	}

	/** @return key of the chunk this meta information is for. */
	public ChunkKey getChunkKey() {
		return chunkKey;
	}

	/**
	 * Locate the base chunk that contains the delta base at {@code position}.
	 *
	 * @param position
	 *            number of bytes before the start of this chunk where the
	 *            base object begins.
	 * @return the base chunk description covering {@code position}.
	 * @throws DhtException
	 *             no base chunk covers {@code position}.
	 */
	BaseChunk getBaseChunk(long position) throws DhtException {
		// Chunks are sorted by ascending relative_start order.
		// Thus for a pack sequence of: A B C, we have:
		//
		// -- C relative_start = 10,000
		// -- B relative_start = 20,000
		// -- A relative_start = 30,000
		//
		// Indicating that chunk C starts 10,000 bytes before us,
		// chunk B starts 20,000 bytes before us (and 10,000 before C),
		// chunk A starts 30,000 bytes before us (and 10,000 before B),
		//
		// If position falls within:
		//
		// -- C (10k), then position is between 0..10,000
		// -- B (20k), then position is between 10,000 .. 20,000
		// -- A (30k), then position is between 20,000 .. 30,000

		// Guard against a chunk that has no bases at all; report the
		// same missing-base condition callers already handle rather
		// than failing with a NullPointerException.
		if (baseChunks == null)
			throw new DhtException(MessageFormat.format(
					DhtText.get().missingLongOffsetBase, chunkKey,
					Long.valueOf(position)));

		int high = baseChunks.size();
		int low = 0;
		while (low < high) {
			final int mid = (low + high) >>> 1;
			final BaseChunk base = baseChunks.get(mid);

			if (position > base.relativeStart) {
				low = mid + 1;

			} else if (mid == 0 || position == base.relativeStart) {
				return base;

			} else if (baseChunks.get(mid - 1).relativeStart < position) {
				return base;

			} else {
				high = mid;
			}
		}

		throw new DhtException(MessageFormat.format(
				DhtText.get().missingLongOffsetBase, chunkKey,
				Long.valueOf(position)));
	}

	/** @return number of fragment chunks that make up the object. */
	public int getFragmentCount() {
		return fragments != null ? fragments.size() : 0;
	}

	/**
	 * Get the nth fragment key.
	 *
	 * @param nth
	 *            index of the fragment; valid only when
	 *            {@code 0 <= nth < getFragmentCount()}.
	 * @return the key.
	 */
	public ChunkKey getFragmentKey(int nth) {
		return fragments.get(nth);
	}

	/**
	 * Find the key of the fragment that occurs after this chunk.
	 *
	 * @param currentKey
	 *            the current chunk key.
	 * @return next chunk after this; null if there isn't one.
	 */
	public ChunkKey getNextFragment(ChunkKey currentKey) {
		// A chunk that is not part of a fragmented object has no
		// fragment list; there is no next fragment in that case.
		if (fragments == null)
			return null;
		for (int i = 0; i < fragments.size() - 1; i++) {
			if (fragments.get(i).equals(currentKey))
				return fragments.get(i + 1);
		}
		return null;
	}

	/** @return chunks to visit. */
	public PrefetchHint getCommitPrefetch() {
		return commitPrefetch;
	}

	/** @return chunks to visit. */
	public PrefetchHint getTreePrefetch() {
		return treePrefetch;
	}

	/** @return true if there is no data in this object worth storing. */
	boolean isEmpty() {
		if (baseChunks != null && !baseChunks.isEmpty())
			return false;
		if (fragments != null && !fragments.isEmpty())
			return false;
		if (commitPrefetch != null && !commitPrefetch.isEmpty())
			return false;
		if (treePrefetch != null && !treePrefetch.isEmpty())
			return false;
		return true;
	}

	/** @return format as byte array for storage. */
	public byte[] asBytes() {
		TinyProtobuf.Encoder e = TinyProtobuf.encode(256);
		if (baseChunks != null) {
			for (BaseChunk base : baseChunks)
				e.message(1, base.asBytes());
		}
		if (fragments != null) {
			for (ChunkKey key : fragments)
				e.bytes(2, key.asBytes());
		}
		if (commitPrefetch != null)
			e.message(51, commitPrefetch.asBytes());
		if (treePrefetch != null)
			e.message(52, treePrefetch.asBytes());
		return e.asByteArray();
	}

	/** Describes other chunks that contain the bases for this chunk's deltas. */
	public static class BaseChunk {
		final long relativeStart;

		private final ChunkKey chunk;

		BaseChunk(long relativeStart, ChunkKey chunk) {
			this.relativeStart = relativeStart;
			this.chunk = chunk;
		}

		/** @return bytes backward from current chunk to start of base chunk. */
		public long getRelativeStart() {
			return relativeStart;
		}

		/** @return unique key of this chunk. */
		public ChunkKey getChunkKey() {
			return chunk;
		}

		TinyProtobuf.Encoder asBytes() {
			// 11 bytes is the worst-case varint encoding of the
			// int64 field, plus 2 bytes of tag/length for the key.
			int max = 11 + 2 + ChunkKey.KEYLEN;
			TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
			e.int64(1, relativeStart);
			e.bytes(2, chunk.asBytes());
			return e;
		}

		static BaseChunk fromBytes(TinyProtobuf.Decoder d) {
			long relativeStart = -1;
			ChunkKey chunk = null;

			PARSE: for (;;) {
				switch (d.next()) {
				case 0: // end of message
					break PARSE;
				case 1:
					relativeStart = d.int64();
					continue;
				case 2:
					chunk = ChunkKey.fromBytes(d);
					continue;
				default: // unknown field; skip for forward compatibility
					d.skip();
					continue;
				}
			}

			return new BaseChunk(relativeStart, chunk);
		}
	}

	/** Describes the prefetching for a particular object type. */
	public static class PrefetchHint {
		private final List<ChunkKey> edge;

		private final List<ChunkKey> sequential;

		PrefetchHint(List<ChunkKey> edge, List<ChunkKey> sequential) {
			if (edge == null)
				edge = Collections.emptyList();
			else
				edge = Collections.unmodifiableList(edge);

			if (sequential == null)
				sequential = Collections.emptyList();
			else
				sequential = Collections.unmodifiableList(sequential);

			this.edge = edge;
			this.sequential = sequential;
		}

		/** @return chunks on the edge of this chunk. */
		public List<ChunkKey> getEdge() {
			return edge;
		}

		/** @return chunks according to sequential ordering. */
		public List<ChunkKey> getSequential() {
			return sequential;
		}

		boolean isEmpty() {
			return edge.isEmpty() && sequential.isEmpty();
		}

		TinyProtobuf.Encoder asBytes() {
			// Each key costs at most 2 bytes of tag/length overhead.
			int max = 0;
			max += (2 + ChunkKey.KEYLEN) * edge.size();
			max += (2 + ChunkKey.KEYLEN) * sequential.size();

			TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
			for (ChunkKey key : edge)
				e.bytes(1, key.asBytes());
			for (ChunkKey key : sequential)
				e.bytes(2, key.asBytes());
			return e;
		}

		static PrefetchHint fromBytes(TinyProtobuf.Decoder d) {
			ArrayList<ChunkKey> edge = null;
			ArrayList<ChunkKey> sequential = null;

			PARSE: for (;;) {
				switch (d.next()) {
				case 0: // end of message
					break PARSE;
				case 1:
					if (edge == null)
						edge = new ArrayList<ChunkKey>(16);
					edge.add(ChunkKey.fromBytes(d));
					continue;
				case 2:
					if (sequential == null)
						sequential = new ArrayList<ChunkKey>(16);
					sequential.add(ChunkKey.fromBytes(d));
					continue;
				default: // unknown field; skip for forward compatibility
					d.skip();
					continue;
				}
			}

			if (edge != null)
				edge.trimToSize();

			if (sequential != null)
				sequential.trimToSize();

			return new PrefetchHint(edge, sequential);
		}
	}
}

View File

@ -0,0 +1,111 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import java.text.MessageFormat;
import java.util.List;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta.BaseChunk;
/** Static helpers for reading {@code ChunkMeta} protobuf messages. */
class ChunkMetaUtil {
	/**
	 * Locate the base chunk that contains the delta base at {@code position}.
	 *
	 * @param chunkKey
	 *            key of the chunk whose metadata is being searched; used
	 *            only to construct the exception message.
	 * @param meta
	 *            metadata describing the available base chunks.
	 * @param position
	 *            number of bytes before the start of this chunk where the
	 *            base object begins.
	 * @return the base chunk covering {@code position}.
	 * @throws DhtException
	 *             no base chunk covers {@code position}.
	 */
	static BaseChunk getBaseChunk(ChunkKey chunkKey, ChunkMeta meta,
			long position) throws DhtException {
		// The base chunk list is sorted by ascending relative_start.
		// For a pack sequence A B C the entries look like:
		//
		//   C relative_start = 10,000  (covers position 0..10,000)
		//   B relative_start = 20,000  (covers 10,000..20,000)
		//   A relative_start = 30,000  (covers 20,000..30,000)
		//
		// so entry N covers the half-open interval between the
		// relative_start of entry N-1 and its own relative_start.
		// Binary search for the first entry whose relative_start is
		// at or beyond the requested position.
		final List<BaseChunk> bases = meta.getBaseChunkList();
		int lo = 0;
		int hi = bases.size();
		while (lo < hi) {
			final int mid = (lo + hi) >>> 1;
			final BaseChunk candidate = bases.get(mid);
			final long start = candidate.getRelativeStart();

			if (start < position) {
				// Candidate starts before the position; look later.
				lo = mid + 1;
			} else if (mid == 0 || start == position
					|| position > bases.get(mid - 1).getRelativeStart()) {
				// Exact start, first entry, or position falls in the
				// gap after the previous entry: this is the cover.
				return candidate;
			} else {
				hi = mid;
			}
		}

		throw new DhtException(MessageFormat.format(
				DhtText.get().missingLongOffsetBase, chunkKey,
				Long.valueOf(position)));
	}

	/**
	 * Find the fragment that follows {@code chunkKey} in the fragment list.
	 *
	 * @param meta
	 *            metadata holding the ordered fragment key strings.
	 * @param chunkKey
	 *            the current fragment's key.
	 * @return key of the next fragment; null if {@code chunkKey} is the
	 *         last fragment or does not appear in the list.
	 */
	static ChunkKey getNextFragment(ChunkMeta meta, ChunkKey chunkKey) {
		final int cnt = meta.getFragmentCount();
		for (int i = 0; i + 1 < cnt; i++) {
			if (chunkKey.equals(ChunkKey.fromString(meta.getFragment(i))))
				return ChunkKey.fromString(meta.getFragment(i + 1));
		}
		return null;
	}

	private ChunkMetaUtil() {
		// Static utilities only, do not create instances.
	}
}

View File

@ -45,12 +45,16 @@
import java.io.IOException; import java.io.IOException;
import java.text.MessageFormat; import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo.ChunkList;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.pack.CachedPack; import org.eclipse.jgit.storage.pack.CachedPack;
import org.eclipse.jgit.storage.pack.ObjectToPack; import org.eclipse.jgit.storage.pack.ObjectToPack;
@ -61,7 +65,11 @@
public class DhtCachedPack extends CachedPack { public class DhtCachedPack extends CachedPack {
private final CachedPackInfo info; private final CachedPackInfo info;
private Set<ChunkKey> chunkKeySet; private Set<ObjectId> tips;
private Set<ChunkKey> keySet;
private ChunkKey[] keyList;
DhtCachedPack(CachedPackInfo info) { DhtCachedPack(CachedPackInfo info) {
this.info = info; this.info = info;
@ -69,7 +77,13 @@ public class DhtCachedPack extends CachedPack {
@Override @Override
public Set<ObjectId> getTips() { public Set<ObjectId> getTips() {
return Collections.unmodifiableSet(info.tips); if (tips == null) {
tips = new HashSet<ObjectId>();
for (String idString : info.getTipList().getObjectNameList())
tips.add(ObjectId.fromString(idString));
tips = Collections.unmodifiableSet(tips);
}
return tips;
} }
@Override @Override
@ -90,23 +104,37 @@ public CachedPackInfo getCachedPackInfo() {
@Override @Override
public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) { public boolean hasObject(ObjectToPack obj, StoredObjectRepresentation rep) {
DhtObjectRepresentation objrep = (DhtObjectRepresentation) rep; DhtObjectRepresentation objrep = (DhtObjectRepresentation) rep;
if (chunkKeySet == null) if (keySet == null)
chunkKeySet = new HashSet<ChunkKey>(info.chunks); init();
return chunkKeySet.contains(objrep.getChunkKey()); return keySet.contains(objrep.getChunkKey());
}
private void init() {
ChunkList chunkList = info.getChunkList();
int cnt = chunkList.getChunkKeyCount();
keySet = new HashSet<ChunkKey>();
keyList = new ChunkKey[cnt];
for (int i = 0; i < cnt; i++) {
ChunkKey key = ChunkKey.fromString(chunkList.getChunkKey(i));
keySet.add(key);
keyList[i] = key;
}
} }
void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx) void copyAsIs(PackOutputStream out, boolean validate, DhtReader ctx)
throws IOException { throws IOException {
if (keyList == null)
init();
Prefetcher p = new Prefetcher(ctx, 0); Prefetcher p = new Prefetcher(ctx, 0);
p.push(info.chunks); p.push(Arrays.asList(keyList));
copyPack(out, ctx, p, validate); copyPack(out, p, validate);
} }
private void copyPack(PackOutputStream out, DhtReader ctx, private void copyPack(PackOutputStream out, Prefetcher prefetcher,
Prefetcher prefetcher, boolean validate) throws DhtException, boolean validate) throws DhtException, DhtMissingChunkException,
DhtMissingChunkException, IOException { IOException {
Map<ChunkKey, Long> startsAt = new HashMap<ChunkKey, Long>(); Map<String, Long> startsAt = new HashMap<String, Long>();
for (ChunkKey key : info.chunks) { for (ChunkKey key : keyList) {
PackChunk chunk = prefetcher.get(key); PackChunk chunk = prefetcher.get(key);
// The prefetcher should always produce the chunk for us, if not // The prefetcher should always produce the chunk for us, if not
@ -122,29 +150,34 @@ private void copyPack(PackOutputStream out, DhtReader ctx,
// incorrectly created and would confuse the client. // incorrectly created and would confuse the client.
// //
long position = out.length(); long position = out.length();
if (chunk.getMeta() != null && chunk.getMeta().baseChunks != null) { ChunkMeta meta = chunk.getMeta();
for (ChunkMeta.BaseChunk base : chunk.getMeta().baseChunks) { if (meta != null && meta.getBaseChunkCount() != 0) {
for (ChunkMeta.BaseChunk base : meta.getBaseChunkList()) {
Long act = startsAt.get(base.getChunkKey()); Long act = startsAt.get(base.getChunkKey());
long exp = position - base.getRelativeStart(); long exp = position - base.getRelativeStart();
if (act == null) { if (act == null) {
throw new DhtException(MessageFormat.format(DhtText throw new DhtException(MessageFormat.format(DhtText
.get().wrongChunkPositionInCachedPack, info .get().wrongChunkPositionInCachedPack,
.getRowKey(), base.getChunkKey(), rowKey(), base.getChunkKey(),
"[not written]", key, exp)); "[not written]", key, Long.valueOf(exp)));
} }
if (act.longValue() != exp) { if (act.longValue() != exp) {
throw new DhtException(MessageFormat.format(DhtText throw new DhtException(MessageFormat.format(DhtText
.get().wrongChunkPositionInCachedPack, info .get().wrongChunkPositionInCachedPack,
.getRowKey(), base.getChunkKey(), // rowKey(), base.getChunkKey(),
act, key, exp)); act, key, Long.valueOf(exp)));
} }
} }
} }
startsAt.put(key, Long.valueOf(position)); startsAt.put(key.asString(), Long.valueOf(position));
chunk.copyEntireChunkAsIs(out, null, validate); chunk.copyEntireChunkAsIs(out, null, validate);
} }
} }
private String rowKey() {
return info.getName() + "." + info.getVersion();
}
} }

View File

@ -53,6 +53,9 @@
import java.util.LinkedList; import java.util.LinkedList;
import java.util.zip.Deflater; import java.util.zip.Deflater;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ObjectInfo.ObjectType;
import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectInserter; import org.eclipse.jgit.lib.ObjectInserter;
@ -174,8 +177,13 @@ private ObjectId insertStream(final int type, final long inflatedSize,
ChunkKey key = chunk.end(chunkDigest); ChunkKey key = chunk.end(chunkDigest);
chunk.setChunkIndex(Collections.singletonList(oe)); chunk.setChunkIndex(Collections.singletonList(oe));
chunk.safePut(db, dbBuffer()); chunk.safePut(db, dbBuffer());
ObjectInfo info = new ObjectInfo(key, -1, type, position,
packedSize, inflatedSize, null, false); GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
b.setObjectType(ObjectType.valueOf(type));
b.setOffset(position);
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
ObjectInfo info = new ObjectInfo(key, b.build());
ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId); ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
db.objectIndex().add(objKey, info, dbBuffer()); db.objectIndex().add(objKey, info, dbBuffer());
return objId; return objId;
@ -188,12 +196,15 @@ private ObjectId insertStream(final int type, final long inflatedSize,
chunk = null; chunk = null;
ChunkKey firstChunkKey = fragmentList.get(0); ChunkKey firstChunkKey = fragmentList.get(0);
ChunkMeta.Builder metaBuilder = ChunkMeta.newBuilder();
for (ChunkKey k : fragmentList)
metaBuilder.addFragment(k.asString());
ChunkMeta meta = metaBuilder.build();
for (ChunkKey key : fragmentList) { for (ChunkKey key : fragmentList) {
PackChunk.Members builder = new PackChunk.Members(); PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(key); builder.setChunkKey(key);
ChunkMeta meta = new ChunkMeta(key);
meta.fragments = fragmentList;
builder.setMeta(meta); builder.setMeta(meta);
if (firstChunkKey.equals(key)) if (firstChunkKey.equals(key))
@ -202,8 +213,12 @@ private ObjectId insertStream(final int type, final long inflatedSize,
db.chunk().put(builder, dbBuffer()); db.chunk().put(builder, dbBuffer());
} }
ObjectInfo info = new ObjectInfo(firstChunkKey, -1, type, position, GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
packedSize, inflatedSize, null, true); b.setObjectType(ObjectType.valueOf(type));
b.setOffset(position);
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
ObjectInfo info = new ObjectInfo(firstChunkKey, b.build());
ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId); ObjectIndexKey objKey = ObjectIndexKey.create(repo, objId);
db.objectIndex().add(objKey, info, dbBuffer()); db.objectIndex().add(objKey, info, dbBuffer());
@ -234,12 +249,13 @@ public ObjectId insert(int type, byte[] data, int off, int len)
// TODO Allow more than one chunk pending at a time, this would // TODO Allow more than one chunk pending at a time, this would
// permit batching puts of the ChunkInfo records. // permit batching puts of the ChunkInfo records.
activeChunk.end(digest()); if (!activeChunk.isEmpty()) {
activeChunk.safePut(db, dbBuffer()); activeChunk.end(digest());
activeChunk = newChunk(); activeChunk.safePut(db, dbBuffer());
activeChunk = newChunk();
if (activeChunk.whole(deflater(), type, data, off, len, objId)) if (activeChunk.whole(deflater(), type, data, off, len, objId))
return objId; return objId;
}
return insertStream(type, len, asStream(data, off, len)); return insertStream(type, len, asStream(data, off, len));
} }
@ -295,7 +311,7 @@ private ChunkFormatter newChunk() {
ChunkFormatter fmt; ChunkFormatter fmt;
fmt = new ChunkFormatter(repo, options); fmt = new ChunkFormatter(repo, options);
fmt.setSource(ChunkInfo.Source.INSERT); fmt.setSource(GitStore.ChunkInfo.Source.INSERT);
return fmt; return fmt;
} }

View File

@ -49,6 +49,7 @@
import java.security.SecureRandom; import java.security.SecureRandom;
import java.util.zip.Deflater; import java.util.zip.Deflater;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.Config; import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.CoreConfig; import org.eclipse.jgit.lib.CoreConfig;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;

View File

@ -76,7 +76,7 @@ public ObjectId getDeltaBase() {
@Override @Override
public int getFormat() { public int getFormat() {
if (info.getDeltaBase() != null) if (info.isDelta())
return PACK_DELTA; return PACK_DELTA;
return PACK_WHOLE; return PACK_WHOLE;
} }

View File

@ -67,10 +67,13 @@
import java.util.List; import java.util.List;
import java.util.ListIterator; import java.util.ListIterator;
import java.util.Map; import java.util.Map;
import java.util.Set;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.MutableObjectId; import org.eclipse.jgit.lib.MutableObjectId;
@ -86,6 +89,8 @@
import org.eclipse.jgit.treewalk.CanonicalTreeParser; import org.eclipse.jgit.treewalk.CanonicalTreeParser;
import org.eclipse.jgit.util.LongList; import org.eclipse.jgit.util.LongList;
import com.google.protobuf.ByteString;
/** Parses the pack stream into chunks, and indexes the chunks for lookup. */ /** Parses the pack stream into chunks, and indexes the chunks for lookup. */
public class DhtPackParser extends PackParser { public class DhtPackParser extends PackParser {
private final DhtObjDatabase objdb; private final DhtObjDatabase objdb;
@ -112,7 +117,7 @@ public class DhtPackParser extends PackParser {
private Edges[] openEdges; private Edges[] openEdges;
/** Prior chunks that were written, keyed by object type code. */ /** Prior chunks that were written, keyed by object type code. */
private List<ChunkInfo>[] infoByOrder; private List<ChunkKey>[] chunkByOrder;
/** Information on chunks already written out. */ /** Information on chunks already written out. */
private Map<ChunkKey, ChunkInfo> infoByKey; private Map<ChunkKey, ChunkInfo> infoByKey;
@ -199,7 +204,7 @@ public class DhtPackParser extends PackParser {
dbWriteBuffer = db.newWriteBuffer(); dbWriteBuffer = db.newWriteBuffer();
openChunks = new ChunkFormatter[5]; openChunks = new ChunkFormatter[5];
openEdges = new Edges[5]; openEdges = new Edges[5];
infoByOrder = newListArray(5); chunkByOrder = newListArray(5);
infoByKey = new HashMap<ChunkKey, ChunkInfo>(); infoByKey = new HashMap<ChunkKey, ChunkInfo>();
dirtyMeta = new HashMap<ChunkKey, ChunkMeta>(); dirtyMeta = new HashMap<ChunkKey, ChunkMeta>();
chunkMeta = new HashMap<ChunkKey, ChunkMeta>(); chunkMeta = new HashMap<ChunkKey, ChunkMeta>();
@ -306,7 +311,7 @@ public PackLock parse(ProgressMonitor receiving, ProgressMonitor resolving)
if (!success) if (!success)
rollback(); rollback();
infoByOrder = null; chunkByOrder = null;
objectListByName = null; objectListByName = null;
objectListByChunk = null; objectListByChunk = null;
linkIterators = null; linkIterators = null;
@ -332,54 +337,74 @@ public int compare(PackedObjectInfo o1, PackedObjectInfo o2) {
} }
private void putCachedPack() throws DhtException { private void putCachedPack() throws DhtException {
CachedPackInfo info = new CachedPackInfo(); CachedPackInfo.Builder info = CachedPackInfo.newBuilder();
for (DhtInfo obj : objectMap) { for (DhtInfo obj : objectMap) {
if (!obj.isInPack()) if (!obj.isInPack())
return; return;
if (!obj.isReferenced()) if (!obj.isReferenced())
info.tips.add(obj.copy()); info.getTipListBuilder().addObjectName(obj.name());
} }
MessageDigest version = Constants.newMessageDigest(); MessageDigest version = Constants.newMessageDigest();
addChunkList(info, version, infoByOrder[OBJ_TAG]); addChunkList(info, version, chunkByOrder[OBJ_TAG]);
addChunkList(info, version, infoByOrder[OBJ_COMMIT]); addChunkList(info, version, chunkByOrder[OBJ_COMMIT]);
addChunkList(info, version, infoByOrder[OBJ_TREE]); addChunkList(info, version, chunkByOrder[OBJ_TREE]);
addChunkList(info, version, infoByOrder[OBJ_BLOB]); addChunkList(info, version, chunkByOrder[OBJ_BLOB]);
info.name = computePackName(); info.setName(computePackName().name());
info.version = ObjectId.fromRaw(version.digest()); info.setVersion(ObjectId.fromRaw(version.digest()).name());
cachedPackKey = info.getRowKey(); cachedPackKey = CachedPackKey.fromInfo(info.build());
for (List<ChunkInfo> list : infoByOrder) { for (List<ChunkKey> list : chunkByOrder) {
if (list == null) if (list == null)
continue; continue;
for (ChunkInfo c : list) { for (ChunkKey key : list) {
c.cachedPack = cachedPackKey; ChunkInfo oldInfo = infoByKey.get(key);
if (c.isFragment()) GitStore.ChunkInfo.Builder b =
db.repository().put(repo, info, dbWriteBuffer); GitStore.ChunkInfo.newBuilder(oldInfo.getData());
b.setCachedPackKey(cachedPackKey.asString());
ChunkInfo newInfo = new ChunkInfo(key, b.build());
infoByKey.put(key, newInfo);
// A fragment was already put, and has to be re-put.
// Non-fragments will put later and do not put now.
if (newInfo.getData().getIsFragment())
db.repository().put(repo, newInfo, dbWriteBuffer);
} }
} }
db.repository().put(repo, info, dbWriteBuffer); db.repository().put(repo, info.build(), dbWriteBuffer);
} }
private void addChunkList(CachedPackInfo info, MessageDigest version, private void addChunkList(CachedPackInfo.Builder info,
List<ChunkInfo> list) { MessageDigest version, List<ChunkKey> list) {
if (list == null) if (list == null)
return; return;
long bytesTotal = info.getBytesTotal();
long objectsTotal = info.getObjectsTotal();
long objectsDelta = info.getObjectsDelta();
byte[] buf = new byte[Constants.OBJECT_ID_LENGTH]; byte[] buf = new byte[Constants.OBJECT_ID_LENGTH];
for (ChunkInfo c : list) { for (ChunkKey key : list) {
int len = c.chunkSize - ChunkFormatter.TRAILER_SIZE; ChunkInfo chunkInfo = infoByKey.get(key);
info.bytesTotal += len; GitStore.ChunkInfo c = chunkInfo.getData();
info.objectsTotal += c.objectsTotal; int len = c.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
info.objectsDelta += c.objectsOfsDelta; bytesTotal += len;
info.objectsDelta += c.objectsRefDelta; objectsTotal += c.getObjectCounts().getTotal();
info.chunks.add(c.getChunkKey()); objectsDelta += c.getObjectCounts().getOfsDelta();
c.getChunkKey().getChunkHash().copyRawTo(buf, 0); objectsDelta += c.getObjectCounts().getRefDelta();
info.getChunkListBuilder().addChunkKey(
chunkInfo.getChunkKey().asString());
chunkInfo.getChunkKey().getChunkHash().copyRawTo(buf, 0);
version.update(buf); version.update(buf);
} }
info.setBytesTotal(bytesTotal);
info.setObjectsTotal(objectsTotal);
info.setObjectsDelta(objectsDelta);
} }
private ObjectId computePackName() { private ObjectId computePackName() {
@ -420,10 +445,10 @@ private void rollback() throws DhtException {
} }
} }
deleteChunks(infoByOrder[OBJ_COMMIT]); deleteChunks(chunkByOrder[OBJ_COMMIT]);
deleteChunks(infoByOrder[OBJ_TREE]); deleteChunks(chunkByOrder[OBJ_TREE]);
deleteChunks(infoByOrder[OBJ_BLOB]); deleteChunks(chunkByOrder[OBJ_BLOB]);
deleteChunks(infoByOrder[OBJ_TAG]); deleteChunks(chunkByOrder[OBJ_TAG]);
dbWriteBuffer.flush(); dbWriteBuffer.flush();
} catch (Throwable err) { } catch (Throwable err) {
@ -431,10 +456,9 @@ private void rollback() throws DhtException {
} }
} }
private void deleteChunks(List<ChunkInfo> list) throws DhtException { private void deleteChunks(List<ChunkKey> list) throws DhtException {
if (list != null) { if (list != null) {
for (ChunkInfo info : list) { for (ChunkKey key : list) {
ChunkKey key = info.getChunkKey();
db.chunk().remove(key, dbWriteBuffer); db.chunk().remove(key, dbWriteBuffer);
db.repository().remove(repo, key, dbWriteBuffer); db.repository().remove(repo, key, dbWriteBuffer);
} }
@ -605,60 +629,77 @@ private void putChunkIndexes() throws DhtException {
private void putChunkIndex(List<DhtInfo> objectList, ChunkKey key, int type) private void putChunkIndex(List<DhtInfo> objectList, ChunkKey key, int type)
throws DhtException { throws DhtException {
ChunkInfo info = infoByKey.get(key); ChunkInfo oldInfo = infoByKey.get(key);
info.objectsTotal = objectList.size(); GitStore.ChunkInfo.Builder info
info.objectType = type; = GitStore.ChunkInfo.newBuilder(oldInfo.getData());
PackChunk.Members builder = new PackChunk.Members(); PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(key); builder.setChunkKey(key);
byte[] index = ChunkIndex.create(objectList); byte[] index = ChunkIndex.create(objectList);
info.indexSize = index.length; info.setIndexSize(index.length);
builder.setChunkIndex(index); builder.setChunkIndex(index);
ChunkMeta meta = dirtyMeta.remove(key); ChunkMeta meta = dirtyMeta.remove(key);
if (meta == null) if (meta == null)
meta = chunkMeta.get(key); meta = chunkMeta.get(key);
if (meta == null)
meta = new ChunkMeta(key);
switch (type) { switch (type) {
case OBJ_COMMIT: { case OBJ_COMMIT: {
Edges edges = chunkEdges.get(key); Edges edges = chunkEdges.get(key);
if (edges != null) { List<ChunkKey> e = edges != null ? edges.commitEdges : null;
List<ChunkKey> e = edges.commitEdges; List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT);
List<ChunkKey> s = sequentialHint(key, OBJ_COMMIT); if (e == null)
meta.commitPrefetch = new ChunkMeta.PrefetchHint(e, s); e = Collections.emptyList();
if (s == null)
s = Collections.emptyList();
if (!e.isEmpty() || !s.isEmpty()) {
ChunkMeta.Builder m = edit(meta);
ChunkMeta.PrefetchHint.Builder h = m.getCommitPrefetchBuilder();
for (ChunkKey k : e)
h.addEdge(k.asString());
for (ChunkKey k : s)
h.addSequential(k.asString());
meta = m.build();
} }
break; break;
} }
case OBJ_TREE: { case OBJ_TREE: {
List<ChunkKey> s = sequentialHint(key, OBJ_TREE); List<ChunkKey> s = sequentialHint(key, OBJ_TREE);
meta.treePrefetch = new ChunkMeta.PrefetchHint(null, s); if (s == null)
s = Collections.emptyList();
if (!s.isEmpty()) {
ChunkMeta.Builder m = edit(meta);
ChunkMeta.PrefetchHint.Builder h = m.getTreePrefetchBuilder();
for (ChunkKey k : s)
h.addSequential(k.asString());
meta = m.build();
}
break; break;
} }
} }
if (meta.isEmpty()) { if (meta != null) {
info.metaSize = 0; info.setMetaSize(meta.getSerializedSize());
} else {
info.metaSize = meta.asBytes().length;
builder.setMeta(meta); builder.setMeta(meta);
} }
db.repository().put(repo, info, dbWriteBuffer); ChunkInfo newInfo = new ChunkInfo(key, info.build());
infoByKey.put(key, newInfo);
db.repository().put(repo, newInfo, dbWriteBuffer);
db.chunk().put(builder, dbWriteBuffer); db.chunk().put(builder, dbWriteBuffer);
} }
private static ChunkMeta.Builder edit(ChunkMeta meta) {
if (meta != null)
return ChunkMeta.newBuilder(meta);
return ChunkMeta.newBuilder();
}
private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) { private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) {
List<ChunkInfo> infoList = infoByOrder[typeCode]; List<ChunkKey> all = chunkByOrder[typeCode];
if (infoList == null) if (all == null)
return null; return null;
List<ChunkKey> all = new ArrayList<ChunkKey>(infoList.size());
for (ChunkInfo info : infoList)
all.add(info.getChunkKey());
int idx = all.indexOf(key); int idx = all.indexOf(key);
if (0 <= idx) { if (0 <= idx) {
int max = options.getPrefetchDepth(); int max = options.getPrefetchDepth();
@ -669,10 +710,10 @@ private List<ChunkKey> sequentialHint(ChunkKey key, int typeCode) {
} }
private void putDirtyMeta() throws DhtException { private void putDirtyMeta() throws DhtException {
for (ChunkMeta meta : dirtyMeta.values()) { for (Map.Entry<ChunkKey, ChunkMeta> meta : dirtyMeta.entrySet()) {
PackChunk.Members builder = new PackChunk.Members(); PackChunk.Members builder = new PackChunk.Members();
builder.setChunkKey(meta.getChunkKey()); builder.setChunkKey(meta.getKey());
builder.setMeta(meta); builder.setMeta(meta.getValue());
db.chunk().put(builder, dbWriteBuffer); db.chunk().put(builder, dbWriteBuffer);
} }
} }
@ -892,15 +933,15 @@ protected void onObjectData(Source src, byte[] raw, int pos, int len)
private boolean longOfsDelta(ChunkFormatter w, long infSize, long basePtr) { private boolean longOfsDelta(ChunkFormatter w, long infSize, long basePtr) {
final int type = typeOf(basePtr); final int type = typeOf(basePtr);
final List<ChunkInfo> infoList = infoByOrder[type]; final List<ChunkKey> infoList = chunkByOrder[type];
final int baseIdx = chunkIdx(basePtr); final int baseIdx = chunkIdx(basePtr);
final ChunkInfo baseInfo = infoList.get(baseIdx); final ChunkInfo baseInfo = infoByKey.get(infoList.get(baseIdx));
// Go backwards to the start of the base's chunk. // Go backwards to the start of the base's chunk.
long relativeChunkStart = 0; long relativeChunkStart = 0;
for (int i = infoList.size() - 1; baseIdx <= i; i--) { for (int i = infoList.size() - 1; baseIdx <= i; i--) {
ChunkInfo info = infoList.get(i); GitStore.ChunkInfo info = infoByKey.get(infoList.get(i)).getData();
int packSize = info.chunkSize - ChunkFormatter.TRAILER_SIZE; int packSize = info.getChunkSize() - ChunkFormatter.TRAILER_SIZE;
relativeChunkStart += packSize; relativeChunkStart += packSize;
} }
@ -940,14 +981,24 @@ private void endFragmentedObject() throws DhtException {
if (lastKey != null) if (lastKey != null)
currFragments.add(lastKey); currFragments.add(lastKey);
ChunkMeta.Builder protoBuilder = ChunkMeta.newBuilder();
for (ChunkKey key : currFragments)
protoBuilder.addFragment(key.asString());
ChunkMeta protoMeta = protoBuilder.build();
for (ChunkKey key : currFragments) { for (ChunkKey key : currFragments) {
ChunkMeta meta = chunkMeta.get(key); ChunkMeta oldMeta = chunkMeta.get(key);
if (meta == null) { if (oldMeta != null) {
meta = new ChunkMeta(key); ChunkMeta.Builder newMeta = ChunkMeta.newBuilder(oldMeta);
newMeta.clearFragment();
newMeta.mergeFrom(protoMeta);
ChunkMeta meta = newMeta.build();
dirtyMeta.put(key, meta);
chunkMeta.put(key, meta); chunkMeta.put(key, meta);
} else {
dirtyMeta.put(key, protoMeta);
chunkMeta.put(key, protoMeta);
} }
meta.fragments = currFragments;
dirtyMeta.put(key, meta);
} }
currFragments = null; currFragments = null;
} }
@ -1093,7 +1144,7 @@ protected int readDatabase(byte[] dst, int pos, int cnt) throws IOException {
if (meta == null) if (meta == null)
return 0; return 0;
ChunkKey next = meta.getNextFragment(dbChunk.getChunkKey()); ChunkKey next = ChunkMetaUtil.getNextFragment(meta, dbChunk.getChunkKey());
if (next == null) if (next == null)
return 0; return 0;
@ -1200,7 +1251,7 @@ private ChunkFormatter openChunk(int typeCode) throws DhtException {
ChunkFormatter w = openChunks[typeCode]; ChunkFormatter w = openChunks[typeCode];
if (w == null) { if (w == null) {
w = new ChunkFormatter(repo, options); w = new ChunkFormatter(repo, options);
w.setSource(ChunkInfo.Source.RECEIVE); w.setSource(GitStore.ChunkInfo.Source.RECEIVE);
w.setObjectType(typeCode); w.setObjectType(typeCode);
openChunks[typeCode] = w; openChunks[typeCode] = w;
} }
@ -1221,9 +1272,9 @@ private ChunkKey endChunk(int typeCode) throws DhtException {
ChunkKey key = w.end(chunkKeyDigest); ChunkKey key = w.end(chunkKeyDigest);
ChunkInfo info = w.getChunkInfo(); ChunkInfo info = w.getChunkInfo();
if (infoByOrder[typeCode] == null) if (chunkByOrder[typeCode] == null)
infoByOrder[typeCode] = new ArrayList<ChunkInfo>(); chunkByOrder[typeCode] = new ArrayList<ChunkKey>();
infoByOrder[typeCode].add(info); chunkByOrder[typeCode].add(key);
infoByKey.put(key, info); infoByKey.put(key, info);
if (w.getChunkMeta() != null) if (w.getChunkMeta() != null)
@ -1260,7 +1311,7 @@ else if (streamPosition == pos)
} }
private long makeObjectPointer(ChunkFormatter w, int typeCode) { private long makeObjectPointer(ChunkFormatter w, int typeCode) {
List<ChunkInfo> list = infoByOrder[typeCode]; List<ChunkKey> list = chunkByOrder[typeCode];
int idx = list == null ? 0 : list.size(); int idx = list == null ? 0 : list.size();
int ptr = w.position(); int ptr = w.position();
return (((long) typeCode) << 61) | (((long) idx) << 32) | ptr; return (((long) typeCode) << 61) | (((long) idx) << 32) | ptr;
@ -1279,14 +1330,14 @@ private static int offsetOf(long objectPtr) {
} }
private boolean isInCurrentChunk(long objectPtr) { private boolean isInCurrentChunk(long objectPtr) {
List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)]; List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
if (list == null) if (list == null)
return chunkIdx(objectPtr) == 0; return chunkIdx(objectPtr) == 0;
return chunkIdx(objectPtr) == list.size(); return chunkIdx(objectPtr) == list.size();
} }
private ChunkKey chunkOf(long objectPtr) throws DhtException { private ChunkKey chunkOf(long objectPtr) throws DhtException {
List<ChunkInfo> list = infoByOrder[typeOf(objectPtr)]; List<ChunkKey> list = chunkByOrder[typeOf(objectPtr)];
int idx = chunkIdx(objectPtr); int idx = chunkIdx(objectPtr);
if (list == null || list.size() <= idx) { if (list == null || list.size() <= idx) {
throw new DhtException(MessageFormat.format( throw new DhtException(MessageFormat.format(
@ -1295,7 +1346,7 @@ private ChunkKey chunkOf(long objectPtr) throws DhtException {
Integer.valueOf(idx), // Integer.valueOf(idx), //
Integer.valueOf(offsetOf(objectPtr)))); Integer.valueOf(offsetOf(objectPtr))));
} }
return list.get(idx).getChunkKey(); return list.get(idx);
} }
private static DhtException panicCannotInsert() { private static DhtException panicCannotInsert() {
@ -1349,8 +1400,19 @@ void setType(int type) {
} }
ObjectInfo info(ChunkKey chunkKey) { ObjectInfo info(ChunkKey chunkKey) {
return new ObjectInfo(chunkKey, -1, getType(), offsetOf(chunkPtr), GitStore.ObjectInfo.Builder b = GitStore.ObjectInfo.newBuilder();
packedSize, inflatedSize, base, isFragmented()); b.setObjectType(GitStore.ObjectInfo.ObjectType.valueOf(getType()));
b.setOffset(offsetOf(chunkPtr));
b.setPackedSize(packedSize);
b.setInflatedSize(inflatedSize);
if (base != null) {
byte[] t = new byte[Constants.OBJECT_ID_LENGTH];
base.copyRawTo(t, 0);
b.setDeltaBase(ByteString.copyFrom(t));
}
if (isFragmented())
b.setIsFragmented(true);
return new ObjectInfo(chunkKey, b.build());
} }
} }

View File

@ -63,6 +63,7 @@
import org.eclipse.jgit.errors.IncorrectObjectTypeException; import org.eclipse.jgit.errors.IncorrectObjectTypeException;
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException; import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.lib.AbbreviatedObjectId; import org.eclipse.jgit.lib.AbbreviatedObjectId;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.AsyncObjectLoaderQueue; import org.eclipse.jgit.lib.AsyncObjectLoaderQueue;
@ -76,7 +77,6 @@
import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable; import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
@ -186,7 +186,7 @@ public ObjectReader newReader() {
@Override @Override
public boolean has(AnyObjectId objId, int typeHint) throws IOException { public boolean has(AnyObjectId objId, int typeHint) throws IOException {
if (objId instanceof RefData.IdWithChunk) if (objId instanceof RefDataUtil.IdWithChunk)
return true; return true;
if (recentChunks.has(repo, objId)) if (recentChunks.has(repo, objId))
@ -283,8 +283,8 @@ ChunkAndOffset getChunk(AnyObjectId objId, int typeHint, boolean checkRecent)
} }
ChunkKey key; ChunkKey key;
if (objId instanceof RefData.IdWithChunk) if (objId instanceof RefDataUtil.IdWithChunk)
key = ((RefData.IdWithChunk) objId).getChunkKey(); key = ((RefDataUtil.IdWithChunk) objId).getChunkKey();
else else
key = repository.getRefDatabase().findChunk(objId); key = repository.getRefDatabase().findChunk(objId);
@ -331,8 +331,8 @@ ChunkAndOffset getChunk(AnyObjectId objId, int typeHint, boolean checkRecent)
} }
ChunkKey findChunk(AnyObjectId objId) throws DhtException { ChunkKey findChunk(AnyObjectId objId) throws DhtException {
if (objId instanceof IdWithChunk) if (objId instanceof RefDataUtil.IdWithChunk)
return ((IdWithChunk) objId).getChunkKey(); return ((RefDataUtil.IdWithChunk) objId).getChunkKey();
ChunkKey key = repository.getRefDatabase().findChunk(objId); ChunkKey key = repository.getRefDatabase().findChunk(objId);
if (key != null) if (key != null)

View File

@ -55,6 +55,7 @@
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectIdRef.PeeledNonTag; import org.eclipse.jgit.lib.ObjectIdRef.PeeledNonTag;
@ -68,6 +69,7 @@
import org.eclipse.jgit.revwalk.RevObject; import org.eclipse.jgit.revwalk.RevObject;
import org.eclipse.jgit.revwalk.RevTag; import org.eclipse.jgit.revwalk.RevTag;
import org.eclipse.jgit.revwalk.RevWalk; import org.eclipse.jgit.revwalk.RevWalk;
import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.Database;
import org.eclipse.jgit.util.RefList; import org.eclipse.jgit.util.RefList;
@ -94,7 +96,7 @@ DhtRepository getRepository() {
ChunkKey findChunk(AnyObjectId id) { ChunkKey findChunk(AnyObjectId id) {
RefCache c = cache.get(); RefCache c = cache.get();
if (c != null) { if (c != null) {
RefData.IdWithChunk i = c.hints.get(id); IdWithChunk i = c.hints.get(id);
if (i != null) if (i != null)
return i.getChunkKey(); return i.getChunkKey();
} }
@ -190,8 +192,8 @@ private void cachePeeledState(Ref oldLeaf, Ref newLeaf) {
try { try {
RepositoryKey repo = repository.getRepositoryKey(); RepositoryKey repo = repository.getRepositoryKey();
RefKey key = RefKey.create(repo, newLeaf.getName()); RefKey key = RefKey.create(repo, newLeaf.getName());
RefData oldData = RefData.fromRef(oldLeaf); RefData oldData = RefDataUtil.fromRef(oldLeaf);
RefData newData = RefData.fromRef(newLeaf); RefData newData = RefDataUtil.fromRef(newLeaf);
db.ref().compareAndPut(key, oldData, newData); db.ref().compareAndPut(key, oldData, newData);
} catch (TimeoutException e) { } catch (TimeoutException e) {
// Ignore a timeout here, we were only trying to update // Ignore a timeout here, we were only trying to update
@ -214,13 +216,12 @@ private Ref doPeel(final Ref leaf) throws MissingObjectException,
ChunkKey key = ctx.findChunk(oId); ChunkKey key = ctx.findChunk(oId);
if (key != null) if (key != null)
oId = new RefData.IdWithChunk(oId, key); oId = new IdWithChunk(oId, key);
if (obj instanceof RevTag) { if (obj instanceof RevTag) {
ObjectId pId = rw.peel(obj); ObjectId pId = rw.peel(obj);
key = ctx.findChunk(pId); key = ctx.findChunk(pId);
pId = key != null ? new RefData.IdWithChunk(pId, key) : pId pId = key != null ? new IdWithChunk(pId, key) : pId.copy();
.copy();
return new PeeledTag(leaf.getStorage(), name, oId, pId); return new PeeledTag(leaf.getStorage(), name, oId, pId);
} else { } else {
return new PeeledNonTag(leaf.getStorage(), name, oId); return new PeeledNonTag(leaf.getStorage(), name, oId);
@ -353,7 +354,7 @@ private RefCache readRefs() throws DhtException {
private RefCache read() throws DhtException, TimeoutException { private RefCache read() throws DhtException, TimeoutException {
RefList.Builder<Ref> id = new RefList.Builder<Ref>(); RefList.Builder<Ref> id = new RefList.Builder<Ref>();
RefList.Builder<Ref> sym = new RefList.Builder<Ref>(); RefList.Builder<Ref> sym = new RefList.Builder<Ref>();
ObjectIdSubclassMap<RefData.IdWithChunk> hints = new ObjectIdSubclassMap<RefData.IdWithChunk>(); ObjectIdSubclassMap<IdWithChunk> hints = new ObjectIdSubclassMap<IdWithChunk>();
for (Map.Entry<RefKey, RefData> e : scan()) { for (Map.Entry<RefKey, RefData> e : scan()) {
Ref ref = fromData(e.getKey().getName(), e.getValue()); Ref ref = fromData(e.getKey().getName(), e.getValue());
@ -362,12 +363,12 @@ private RefCache read() throws DhtException, TimeoutException {
sym.add(ref); sym.add(ref);
id.add(ref); id.add(ref);
if (ref.getObjectId() instanceof RefData.IdWithChunk if (ref.getObjectId() instanceof IdWithChunk
&& !hints.contains(ref.getObjectId())) && !hints.contains(ref.getObjectId()))
hints.add((RefData.IdWithChunk) ref.getObjectId()); hints.add((IdWithChunk) ref.getObjectId());
if (ref.getPeeledObjectId() instanceof RefData.IdWithChunk if (ref.getPeeledObjectId() instanceof IdWithChunk
&& !hints.contains(ref.getPeeledObjectId())) && !hints.contains(ref.getPeeledObjectId()))
hints.add((RefData.IdWithChunk) ref.getPeeledObjectId()); hints.add((IdWithChunk) ref.getPeeledObjectId());
} }
id.sort(); id.sort();
@ -377,40 +378,20 @@ private RefCache read() throws DhtException, TimeoutException {
} }
private static Ref fromData(String name, RefData data) { private static Ref fromData(String name, RefData data) {
ObjectId oId = null; if (data.hasSymref()) {
boolean peeled = false; Ref leaf = new Unpeeled(NEW, data.getSymref(), null);
ObjectId pId = null; return new SymbolicRef(name, leaf);
TinyProtobuf.Decoder d = data.decode();
DECODE: for (;;) {
switch (d.next()) {
case 0:
break DECODE;
case RefData.TAG_SYMREF: {
String symref = d.string();
Ref leaf = new Unpeeled(NEW, symref, null);
return new SymbolicRef(name, leaf);
}
case RefData.TAG_TARGET:
oId = RefData.IdWithChunk.decode(d.message());
continue;
case RefData.TAG_IS_PEELED:
peeled = d.bool();
continue;
case RefData.TAG_PEELED:
pId = RefData.IdWithChunk.decode(d.message());
continue;
default:
d.skip();
continue;
}
} }
if (peeled && pId != null) if (!data.hasTarget())
return new Unpeeled(LOOSE, name, null);
ObjectId oId = IdWithChunk.create(data.getTarget());
if (data.getIsPeeled() && data.hasPeeled()) {
ObjectId pId = IdWithChunk.create(data.getPeeled());
return new PeeledTag(LOOSE, name, oId, pId); return new PeeledTag(LOOSE, name, oId, pId);
if (peeled) }
if (data.getIsPeeled())
return new PeeledNonTag(LOOSE, name, oId); return new PeeledNonTag(LOOSE, name, oId);
return new Unpeeled(LOOSE, name, oId); return new Unpeeled(LOOSE, name, oId);
} }
@ -427,10 +408,10 @@ private static class RefCache {
final RefList<Ref> sym; final RefList<Ref> sym;
final ObjectIdSubclassMap<RefData.IdWithChunk> hints; final ObjectIdSubclassMap<IdWithChunk> hints;
RefCache(RefList<Ref> ids, RefList<Ref> sym, RefCache(RefList<Ref> ids, RefList<Ref> sym,
ObjectIdSubclassMap<RefData.IdWithChunk> hints) { ObjectIdSubclassMap<IdWithChunk> hints) {
this.ids = ids; this.ids = ids;
this.sym = sym; this.sym = sym;
this.hints = hints; this.hints = hints;

View File

@ -47,6 +47,7 @@
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref; import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.lib.RefUpdate; import org.eclipse.jgit.lib.RefUpdate;
@ -106,7 +107,7 @@ protected boolean tryLock(boolean deref) throws IOException {
dstRef = dstRef.getLeaf(); dstRef = dstRef.getLeaf();
refKey = RefKey.create(repo, dstRef.getName()); refKey = RefKey.create(repo, dstRef.getName());
oldData = RefData.fromRef(dstRef); oldData = RefDataUtil.fromRef(dstRef);
if (dstRef.isSymbolic()) if (dstRef.isSymbolic())
setOldObjectId(null); setOldObjectId(null);
@ -157,7 +158,7 @@ protected Result doDelete(Result desiredResult) throws IOException {
@Override @Override
protected Result doLink(String target) throws IOException { protected Result doLink(String target) throws IOException {
try { try {
newData = RefData.symbolic(target); newData = RefDataUtil.symbolic(target);
boolean r = db.ref().compareAndPut(refKey, oldData, newData); boolean r = db.ref().compareAndPut(refKey, oldData, newData);
if (r) { if (r) {
getRefDatabase().stored(dstRef.getName(), newData); getRefDatabase().stored(dstRef.getName(), newData);
@ -181,19 +182,19 @@ private RefData newData() throws IOException {
ChunkKey key = ctx.findChunk(newId); ChunkKey key = ctx.findChunk(newId);
if (key != null) if (key != null)
newId = new RefData.IdWithChunk(newId, key); newId = new RefDataUtil.IdWithChunk(newId, key);
if (obj instanceof RevTag) { if (obj instanceof RevTag) {
ObjectId pId = rw.peel(obj); ObjectId pId = rw.peel(obj);
key = ctx.findChunk(pId); key = ctx.findChunk(pId);
pId = key != null ? new RefData.IdWithChunk(pId, key) : pId; pId = key != null ? new RefDataUtil.IdWithChunk(pId, key) : pId;
return RefData.peeled(newId, pId); return RefDataUtil.peeled(newId, pId);
} else if (obj != null) } else if (obj != null)
return RefData.peeled(newId, null); return RefDataUtil.peeled(newId, null);
else else
return RefData.id(newId); return RefDataUtil.id(newId);
} catch (MissingObjectException e) { } catch (MissingObjectException e) {
return RefData.id(newId); return RefDataUtil.id(newId);
} }
} }
} }

View File

@ -59,9 +59,12 @@ public static DhtText get() {
/***/ public String cycleInDeltaChain; /***/ public String cycleInDeltaChain;
/***/ public String databaseRequired; /***/ public String databaseRequired;
/***/ public String expectedObjectSizeDuringCopyAsIs; /***/ public String expectedObjectSizeDuringCopyAsIs;
/***/ public String invalidCachedPackInfo;
/***/ public String invalidChunkKey; /***/ public String invalidChunkKey;
/***/ public String invalidChunkMeta;
/***/ public String invalidObjectIndexKey; /***/ public String invalidObjectIndexKey;
/***/ public String invalidObjectInfo; /***/ public String invalidObjectInfo;
/***/ public String invalidRefData;
/***/ public String missingChunk; /***/ public String missingChunk;
/***/ public String missingLongOffsetBase; /***/ public String missingLongOffsetBase;
/***/ public String nameRequired; /***/ public String nameRequired;
@ -72,12 +75,6 @@ public static DhtText get() {
/***/ public String objectTypeUnknown; /***/ public String objectTypeUnknown;
/***/ public String packParserInvalidPointer; /***/ public String packParserInvalidPointer;
/***/ public String packParserRollbackFailed; /***/ public String packParserRollbackFailed;
/***/ public String protobufNegativeValuesNotSupported;
/***/ public String protobufNoArray;
/***/ public String protobufNotBooleanValue;
/***/ public String protobufUnsupportedFieldType;
/***/ public String protobufWrongFieldLength;
/***/ public String protobufWrongFieldType;
/***/ public String recordingObjects; /***/ public String recordingObjects;
/***/ public String repositoryAlreadyExists; /***/ public String repositoryAlreadyExists;
/***/ public String repositoryMustBeBare; /***/ public String repositoryMustBeBare;

View File

@ -50,6 +50,7 @@
import org.eclipse.jgit.errors.LargeObjectException; import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.lib.ObjectStream; import org.eclipse.jgit.lib.ObjectStream;
@ -102,7 +103,7 @@ public ObjectStream openStream() throws MissingObjectException, IOException {
if (pc != null) if (pc != null)
firstChunk = null; firstChunk = null;
else else
pc = ctx.getChunk(meta.getFragmentKey(0)); pc = ctx.getChunk(ChunkKey.fromString(meta.getFragment(0)));
InputStream in = new ChunkInputStream(meta, ctx, pos, pc); InputStream in = new ChunkInputStream(meta, ctx, pos, pc);
in = new BufferedInputStream(new InflaterInputStream(in), 8192); in = new BufferedInputStream(new InflaterInputStream(in), 8192);
@ -138,7 +139,8 @@ public int read(byte[] dstbuf, int dstptr, int dstlen)
if (fragment == meta.getFragmentCount()) if (fragment == meta.getFragmentCount())
return -1; return -1;
pc = ctx.getChunk(meta.getFragmentKey(++fragment)); pc = ctx.getChunk(ChunkKey.fromString(
meta.getFragment(++fragment)));
ptr = 0; ptr = 0;
n = pc.read(ptr, dstbuf, dstptr, dstlen); n = pc.read(ptr, dstbuf, dstptr, dstlen);
if (n == 0) if (n == 0)

View File

@ -43,13 +43,12 @@
package org.eclipse.jgit.storage.dht; package org.eclipse.jgit.storage.dht;
import java.text.MessageFormat;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
/** Connects an object to the chunk it is stored in. */ /** Connects an object to the chunk it is stored in. */
@ -71,106 +70,40 @@ public static void sort(List<ObjectInfo> toSort) {
Collections.sort(toSort, BY_TIME); Collections.sort(toSort, BY_TIME);
} }
private final ChunkKey chunk;
private final long time;
private final GitStore.ObjectInfo data;
/** /**
* Parse an ObjectInfo from the storage system. * Wrap an ObjectInfo from the storage system.
* *
* @param chunkKey * @param chunkKey
* the chunk the object points to. * the chunk the object points to.
* @param data * @param data
* the data of the ObjectInfo. * the data of the ObjectInfo.
* @param time
* timestamp of the ObjectInfo. If the implementation does not
* store timestamp data, supply a negative value.
* @return the object's information.
*/ */
public static ObjectInfo fromBytes(ChunkKey chunkKey, byte[] data, long time) { public ObjectInfo(ChunkKey chunkKey, GitStore.ObjectInfo data) {
return fromBytes(chunkKey, TinyProtobuf.decode(data), time); this.chunk = chunkKey;
this.time = 0;
this.data = data;
} }
/** /**
* Parse an ObjectInfo from the storage system. * Wrap an ObjectInfo from the storage system.
* *
* @param chunkKey * @param chunkKey
* the chunk the object points to. * the chunk the object points to.
* @param d
* the data of the ObjectInfo.
* @param time * @param time
* timestamp of the ObjectInfo. If the implementation does not * timestamp of the ObjectInfo.
* store timestamp data, supply a negative value. * @param data
* @return the object's information. * the data of the ObjectInfo.
*/ */
public static ObjectInfo fromBytes(ChunkKey chunkKey, public ObjectInfo(ChunkKey chunkKey, long time, GitStore.ObjectInfo data) {
TinyProtobuf.Decoder d, long time) { this.chunk = chunkKey;
int typeCode = -1;
int offset = -1;
long packedSize = -1;
long inflatedSize = -1;
ObjectId deltaBase = null;
boolean fragmented = false;
PARSE: for (;;) {
switch (d.next()) {
case 0:
break PARSE;
case 1:
typeCode = d.int32();
continue;
case 2:
offset = d.int32();
continue;
case 3:
packedSize = d.int64();
continue;
case 4:
inflatedSize = d.int64();
continue;
case 5:
deltaBase = d.bytesObjectId();
continue;
case 6:
fragmented = d.bool();
continue;
default:
d.skip();
continue;
}
}
if (typeCode < 0 || offset < 0 || packedSize < 0 || inflatedSize < 0)
throw new IllegalArgumentException(MessageFormat.format(
DhtText.get().invalidObjectInfo, chunkKey));
return new ObjectInfo(chunkKey, time, typeCode, offset, //
packedSize, inflatedSize, deltaBase, fragmented);
}
private final ChunkKey chunk;
private final long time;
private final int typeCode;
private final int offset;
private final long packedSize;
private final long inflatedSize;
private final ObjectId deltaBase;
private final boolean fragmented;
ObjectInfo(ChunkKey chunk, long time, int typeCode, int offset,
long packedSize, long inflatedSize, ObjectId base,
boolean fragmented) {
this.chunk = chunk;
this.time = time < 0 ? 0 : time; this.time = time < 0 ? 0 : time;
this.typeCode = typeCode; this.data = data;
this.offset = offset;
this.packedSize = packedSize;
this.inflatedSize = inflatedSize;
this.deltaBase = base;
this.fragmented = fragmented;
} }
/** @return the chunk this link points to. */ /** @return the chunk this link points to. */
@ -183,54 +116,43 @@ public long getTime() {
return time; return time;
} }
/** @return GitStore.ObjectInfo to embed in the database. */
public GitStore.ObjectInfo getData() {
return data;
}
/** @return type of the object, in OBJ_* constants. */ /** @return type of the object, in OBJ_* constants. */
public int getType() { public int getType() {
return typeCode; return data.getObjectType().getNumber();
} }
/** @return size of the object when fully inflated. */ /** @return size of the object when fully inflated. */
public long getSize() { public long getSize() {
return inflatedSize; return data.getInflatedSize();
} }
/** @return true if the object storage uses delta compression. */ /** @return true if the object storage uses delta compression. */
public boolean isDelta() { public boolean isDelta() {
return getDeltaBase() != null; return data.hasDeltaBase();
} }
/** @return true if the object has been fragmented across chunks. */ /** @return true if the object has been fragmented across chunks. */
public boolean isFragmented() { public boolean isFragmented() {
return fragmented; return data.getIsFragmented();
} }
int getOffset() { int getOffset() {
return offset; return data.getOffset();
} }
long getPackedSize() { long getPackedSize() {
return packedSize; return data.getPackedSize();
} }
ObjectId getDeltaBase() { ObjectId getDeltaBase() {
return deltaBase; if (data.hasDeltaBase())
} return ObjectId.fromRaw(data.getDeltaBase().toByteArray(), 0);
return null;
/**
* Convert this ObjectInfo into a byte array for storage.
*
* @return the ObjectInfo data, encoded as a byte array. This does not
* include the ChunkKey, callers must store that separately.
*/
public byte[] asBytes() {
TinyProtobuf.Encoder e = TinyProtobuf.encode(256);
e.int32(1, typeCode);
e.int32(2, offset);
e.int64(3, packedSize);
e.int64(4, inflatedSize);
e.bytes(5, deltaBase);
if (fragmented)
e.bool(6, fragmented);
return e.asByteArray();
} }
@Override @Override
@ -238,18 +160,10 @@ public String toString() {
StringBuilder b = new StringBuilder(); StringBuilder b = new StringBuilder();
b.append("ObjectInfo:"); b.append("ObjectInfo:");
b.append(chunk); b.append(chunk);
b.append(" [");
if (0 < time) if (0 < time)
b.append(" time=").append(new Date(time)); b.append(" @ ").append(new Date(time));
b.append(" type=").append(Constants.typeString(typeCode)); b.append("\n");
b.append(" offset=").append(offset); b.append(data.toString());
b.append(" packedSize=").append(packedSize);
b.append(" inflatedSize=").append(inflatedSize);
if (deltaBase != null)
b.append(" deltaBase=").append(deltaBase.name());
if (fragmented)
b.append(" fragmented");
b.append(" ]");
return b.toString(); return b.toString();
} }
} }

View File

@ -43,7 +43,6 @@
package org.eclipse.jgit.storage.dht; package org.eclipse.jgit.storage.dht;
import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
import java.util.HashMap; import java.util.HashMap;
@ -55,6 +54,7 @@
import java.util.concurrent.Semaphore; import java.util.concurrent.Semaphore;
import java.util.concurrent.atomic.AtomicReference; import java.util.concurrent.atomic.AtomicReference;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.util.BlockList; import org.eclipse.jgit.util.BlockList;
@ -136,7 +136,7 @@ void plan(List<DhtObjectToPack> list) throws DhtException {
ChunkMeta meta = allMeta.remove(key); ChunkMeta meta = allMeta.remove(key);
if (meta != null) { if (meta != null) {
for (int i = 1; i < meta.getFragmentCount(); i++) for (int i = 1; i < meta.getFragmentCount(); i++)
keys.add(meta.getFragmentKey(i)); keys.add(ChunkKey.fromString(meta.getFragment(i)));
} }
} }
order = keys; order = keys;
@ -221,7 +221,7 @@ private void awaitPendingBatches() throws InterruptedException,
throw metaError.get(); throw metaError.get();
} }
private class MetaLoader implements AsyncCallback<Collection<ChunkMeta>> { private class MetaLoader implements AsyncCallback<Map<ChunkKey, ChunkMeta>> {
private final Context context; private final Context context;
private final Set<ChunkKey> keys; private final Set<ChunkKey> keys;
@ -231,13 +231,11 @@ private class MetaLoader implements AsyncCallback<Collection<ChunkMeta>> {
this.keys = keys; this.keys = keys;
} }
public void onSuccess(Collection<ChunkMeta> result) { public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
try { try {
synchronized (allMeta) { synchronized (allMeta) {
for (ChunkMeta meta : result) { allMeta.putAll(result);
allMeta.put(meta.getChunkKey(), meta); keys.removeAll(result.keySet());
keys.remove(meta.getChunkKey());
}
} }
if (context == Context.FAST_MISSING_OK && !keys.isEmpty()) { if (context == Context.FAST_MISSING_OK && !keys.isEmpty()) {
synchronized (metaMissing) { synchronized (metaMissing) {

View File

@ -63,6 +63,7 @@
import org.eclipse.jgit.errors.CorruptObjectException; import org.eclipse.jgit.errors.CorruptObjectException;
import org.eclipse.jgit.errors.LargeObjectException; import org.eclipse.jgit.errors.LargeObjectException;
import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException; import org.eclipse.jgit.errors.StoredObjectRepresentationNotAvailableException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectLoader;
@ -251,8 +252,6 @@ public PackChunk build() throws DhtException {
private volatile Boolean valid; private volatile Boolean valid;
private volatile ChunkKey nextFragment;
PackChunk(ChunkKey key, byte[] dataBuf, int dataPtr, int dataLen, PackChunk(ChunkKey key, byte[] dataBuf, int dataPtr, int dataLen,
ChunkIndex index, ChunkMeta meta) { ChunkIndex index, ChunkMeta meta) {
this.key = key; this.key = key;
@ -400,9 +399,12 @@ private static ObjectLoader read1(PackChunk pc, int pos,
base = base - pos; base = base - pos;
ChunkMeta.BaseChunk baseChunk; ChunkMeta.BaseChunk baseChunk;
baseChunk = pc.meta.getBaseChunk(base); baseChunk = ChunkMetaUtil.getBaseChunk(
baseChunkKey = baseChunk.getChunkKey(); pc.key,
basePosInChunk = (int) (baseChunk.relativeStart - base); pc.meta,
base);
baseChunkKey = ChunkKey.fromString(baseChunk.getChunkKey());
basePosInChunk = (int) (baseChunk.getRelativeStart() - base);
} }
delta = new Delta(delta, // delta = new Delta(delta, //
@ -559,7 +561,8 @@ private static byte[] inflateFragment(long sz, PackChunk pc, final int pos,
if (inf.needsInput()) { if (inf.needsInput()) {
if (meta.getFragmentCount() <= nextChunk) if (meta.getFragmentCount() <= nextChunk)
break; break;
pc = reader.getChunk(meta.getFragmentKey(nextChunk++)); pc = reader.getChunk(ChunkKey.fromString(
meta.getFragment(nextChunk++)));
if (meta.getFragmentCount() == nextChunk) if (meta.getFragmentCount() == nextChunk)
bs = pc.dataLen; // Include trailer on last chunk. bs = pc.dataLen; // Include trailer on last chunk.
else else
@ -575,7 +578,7 @@ private static byte[] inflateFragment(long sz, PackChunk pc, final int pos,
if (dstoff != sz) { if (dstoff != sz) {
throw new DataFormatException(MessageFormat.format( throw new DataFormatException(MessageFormat.format(
DhtText.get().shortCompressedObject, DhtText.get().shortCompressedObject,
meta.getChunkKey(), ChunkKey.fromString(meta.getFragment(0)),
Integer.valueOf(pos))); Integer.valueOf(pos)));
} }
return dstbuf; return dstbuf;
@ -683,7 +686,8 @@ else if (-1 == obj.size)
if (isFragment()) { if (isFragment()) {
int cnt = meta.getFragmentCount(); int cnt = meta.getFragmentCount();
for (int fragId = 1; fragId < cnt; fragId++) { for (int fragId = 1; fragId < cnt; fragId++) {
PackChunk pc = ctx.getChunk(meta.getFragmentKey(fragId)); PackChunk pc = ctx.getChunk(ChunkKey.fromString(
meta.getFragment(fragId)));
pc.copyEntireChunkAsIs(out, obj, validate); pc.copyEntireChunkAsIs(out, obj, validate);
} }
} }
@ -728,18 +732,6 @@ int getTotalSize() {
return sz; return sz;
} }
ChunkKey getNextFragment() {
if (meta == null)
return null;
ChunkKey next = nextFragment;
if (next == null) {
next = meta.getNextFragment(getChunkKey());
nextFragment = next;
}
return next;
}
private static class Delta { private static class Delta {
/** Child that applies onto this object. */ /** Child that applies onto this object. */
final Delta next; final Delta next;

View File

@ -59,6 +59,7 @@
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.revwalk.RevCommit; import org.eclipse.jgit.revwalk.RevCommit;
import org.eclipse.jgit.revwalk.RevTree; import org.eclipse.jgit.revwalk.RevTree;
@ -218,14 +219,21 @@ void push(ChunkMeta meta) {
if (hint != null) { if (hint != null) {
synchronized (this) { synchronized (this) {
if (followEdgeHints && !hint.getEdge().isEmpty()) if (followEdgeHints && 0 < hint.getEdgeCount())
push(hint.getEdge()); push(hint.getEdgeList());
else else
push(hint.getSequential()); push(hint.getSequentialList());
} }
} }
} }
private void push(List<String> list) {
List<ChunkKey> keys = new ArrayList<ChunkKey>(list.size());
for (String keyString : list)
keys.add(ChunkKey.fromString(keyString));
push(keys);
}
void push(Iterable<ChunkKey> list) { void push(Iterable<ChunkKey> list) {
synchronized (this) { synchronized (this) {
for (ChunkKey key : list) { for (ChunkKey key : list) {

View File

@ -55,7 +55,6 @@
import org.eclipse.jgit.errors.MissingObjectException; import org.eclipse.jgit.errors.MissingObjectException;
import org.eclipse.jgit.lib.AsyncOperation; import org.eclipse.jgit.lib.AsyncOperation;
import org.eclipse.jgit.lib.ObjectId; import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.Database; import org.eclipse.jgit.storage.dht.spi.Database;
@ -119,8 +118,8 @@ private Iterable<T> lookInCache(Iterable<T> objects) {
RecentInfoCache infoCache = reader.getRecentInfoCache(); RecentInfoCache infoCache = reader.getRecentInfoCache();
List<T> missing = null; List<T> missing = null;
for (T obj : objects) { for (T obj : objects) {
if (needChunkOnly && obj instanceof IdWithChunk) { if (needChunkOnly && obj instanceof RefDataUtil.IdWithChunk) {
push(obj, ((IdWithChunk) obj).getChunkKey()); push(obj, ((RefDataUtil.IdWithChunk) obj).getChunkKey());
continue; continue;
} }

View File

@ -48,7 +48,7 @@
import org.eclipse.jgit.lib.AnyObjectId; import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectLoader; import org.eclipse.jgit.lib.ObjectLoader;
import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset; import org.eclipse.jgit.storage.dht.DhtReader.ChunkAndOffset;
import org.eclipse.jgit.storage.dht.RefData.IdWithChunk; import org.eclipse.jgit.storage.dht.RefDataUtil.IdWithChunk;
final class RecentChunks { final class RecentChunks {
private final DhtReader reader; private final DhtReader reader;

View File

@ -1,235 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import static org.eclipse.jgit.storage.dht.TinyProtobuf.encode;
import java.util.Arrays;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
import org.eclipse.jgit.storage.dht.TinyProtobuf.Encoder;
/**
 * Describes the current state of a Git reference.
 * <p>
 * The reference state contains not just the SHA-1 object name that a reference
 * points to, but the state also caches its peeled value if its a tag, and the
 * {@link ChunkKey} the object was observed in when the reference was last
 * updated. This cached data reduces latency when initially starting to work
 * with a repository.
 * <p>
 * Instances are thin immutable wrappers around a TinyProtobuf encoded byte
 * array; the factory methods encode eagerly and {@link #asBytes()} exposes
 * the backing array without copying.
 */
public class RefData {
	/** Magic constant meaning does not exist. */
	public static final RefData NONE = new RefData(new byte[0]);

	// TinyProtobuf field tags of the encoded representation.
	static final int TAG_SYMREF = 1;
	static final int TAG_TARGET = 2;
	static final int TAG_IS_PEELED = 3;
	static final int TAG_PEELED = 4;

	/**
	 * Wrap data previously read from the storage system.
	 *
	 * @param data
	 *            the encoded representation, as produced by {@link #asBytes()}.
	 * @return the content
	 */
	public static RefData fromBytes(byte[] data) {
		return new RefData(data);
	}

	/**
	 * Encode the state of a symbolic reference (e.g. {@code HEAD}).
	 *
	 * @param target
	 *            name of the reference this symref points at.
	 * @return state holding only the symref field.
	 */
	static RefData symbolic(String target) {
		// The size passed to encode() is only an estimate; the encoder
		// grows its buffer dynamically if the guess is too small.
		Encoder e = encode(2 + target.length());
		e.string(TAG_SYMREF, target);
		return new RefData(e.asByteArray());
	}

	/**
	 * Encode state pointing directly at an object.
	 *
	 * @param id
	 *            the target object; if it is an {@link IdWithChunk} the
	 *            chunk hint is encoded alongside the object name.
	 * @return state holding only the target field.
	 */
	static RefData id(AnyObjectId id) {
		Encoder e = encode(4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN);
		e.message(TAG_TARGET, IdWithChunk.encode(id));
		return new RefData(e.asByteArray());
	}

	/**
	 * Snapshot the state of an in-memory {@link Ref}.
	 *
	 * @param ref
	 *            the reference to encode.
	 * @return encoded state; {@link #NONE} if the ref has no object id.
	 */
	static RefData fromRef(Ref ref) {
		if (ref.isSymbolic())
			return symbolic(ref.getTarget().getName());

		if (ref.getObjectId() == null)
			return RefData.NONE;

		int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN;
		Encoder e = encode(max);
		e.message(TAG_TARGET, IdWithChunk.encode(ref.getObjectId()));
		if (ref.isPeeled()) {
			e.bool(TAG_IS_PEELED, true);
			// A peeled ref may still have no peeled id when its target
			// does not peel to another object (not an annotated tag).
			if (ref.getPeeledObjectId() != null)
				e.message(TAG_PEELED,
						IdWithChunk.encode(ref.getPeeledObjectId()));
		}
		return new RefData(e.asByteArray());
	}

	/**
	 * Encode a target whose peeled value is already known.
	 *
	 * @param targetId
	 *            object the reference points at.
	 * @param peeledId
	 *            peeled value of the target; null if the target does not
	 *            peel to another object.
	 * @return encoded state with the is_peeled bit always set.
	 */
	static RefData peeled(ObjectId targetId, ObjectId peeledId) {
		int max = 8 + 2 * OBJECT_ID_STRING_LENGTH + 2 * ChunkKey.KEYLEN;
		Encoder e = encode(max);
		e.message(TAG_TARGET, IdWithChunk.encode(targetId));
		e.bool(TAG_IS_PEELED, true);
		if (peeledId != null)
			e.message(TAG_PEELED, IdWithChunk.encode(peeledId));
		return new RefData(e.asByteArray());
	}

	// Raw TinyProtobuf message; never modified after construction.
	private final byte[] data;

	RefData(byte[] data) {
		this.data = data;
	}

	/** @return a decoder positioned at the start of the encoded fields. */
	TinyProtobuf.Decoder decode() {
		return TinyProtobuf.decode(data);
	}

	/** @return the contents, encoded as a byte array for storage. */
	public byte[] asBytes() {
		return data;
	}

	@Override
	public int hashCode() {
		// djb2-style hash (h = h * 33 + byte) over the raw encoded bytes.
		int hash = 5381;
		for (int ptr = 0; ptr < data.length; ptr++)
			hash = ((hash << 5) + hash) + (data[ptr] & 0xff);
		return hash;
	}

	@Override
	public boolean equals(Object other) {
		// Equality is byte-for-byte equality of the encoded form.
		if (other instanceof RefData)
			return Arrays.equals(data, ((RefData) other).data);
		return false;
	}

	@Override
	public String toString() {
		// Each field appends "\nname: value"; substring(1) drops the
		// leading newline. NOTE(review): this assumes at least one field
		// is present — calling toString() on NONE would throw
		// StringIndexOutOfBoundsException; confirm callers avoid that.
		StringBuilder b = new StringBuilder();
		TinyProtobuf.Decoder d = decode();
		for (;;) {
			switch (d.next()) {
			case 0:
				// End of buffer.
				return b.toString().substring(1);
			case TAG_SYMREF:
				b.append("\nsymref: ").append(d.string());
				continue;
			case TAG_TARGET:
				b.append("\ntarget: ").append(IdWithChunk.decode(d.message()));
				continue;
			case TAG_IS_PEELED:
				b.append("\nis_peeled: ").append(d.bool());
				continue;
			case TAG_PEELED:
				b.append("\npeeled: ").append(IdWithChunk.decode(d.message()));
				continue;
			default:
				// Unknown field; skip for forward compatibility.
				d.skip();
				continue;
			}
		}
	}

	/**
	 * An ObjectId that also remembers the {@link ChunkKey} it was seen in.
	 * <p>
	 * Encoded as a nested message: field 1 is the hex object name, field 2
	 * (optional) is the chunk key.
	 */
	static class IdWithChunk extends ObjectId {
		/**
		 * Decode a nested id message.
		 *
		 * @param d
		 *            decoder positioned on the nested message.
		 * @return the parsed id; an {@link IdWithChunk} when a chunk hint
		 *         was present, otherwise a plain ObjectId.
		 */
		static ObjectId decode(TinyProtobuf.Decoder d) {
			ObjectId id = null;
			ChunkKey key = null;
			DECODE: for (;;) {
				switch (d.next()) {
				case 0:
					break DECODE;
				case 1:
					id = d.stringObjectId();
					continue;
				case 2:
					key = ChunkKey.fromBytes(d);
					continue;
				default:
					d.skip();
				}
			}
			return key != null ? new IdWithChunk(id, key) : id;
		}

		/**
		 * Encode an id (and optional chunk hint) as a nested message.
		 *
		 * @param id
		 *            the object to encode.
		 * @return encoder holding field 1 (hex name) and, when {@code id}
		 *         is an {@link IdWithChunk}, field 2 (chunk key).
		 */
		static TinyProtobuf.Encoder encode(AnyObjectId id) {
			if (id instanceof IdWithChunk) {
				int max = 4 + OBJECT_ID_STRING_LENGTH + ChunkKey.KEYLEN;
				TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
				e.string(1, id);
				e.string(2, ((IdWithChunk) id).chunkKey);
				return e;
			} else {
				int max = 2 + OBJECT_ID_STRING_LENGTH;
				TinyProtobuf.Encoder e = TinyProtobuf.encode(max);
				e.string(1, id);
				return e;
			}
		}

		// Chunk the object was last observed in; may speed up later reads.
		private final ChunkKey chunkKey;

		IdWithChunk(AnyObjectId id, ChunkKey key) {
			super(id);
			this.chunkKey = key;
		}

		ChunkKey getChunkKey() {
			return chunkKey;
		}

		@Override
		public String toString() {
			return name() + "->" + chunkKey;
		}
	}
}

View File

@ -0,0 +1,132 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Ref;
/**
 * Utility methods for building and converting {@link RefData} messages.
 * <p>
 * Every factory returns an immutable protobuf message ready to be written
 * to the DHT's reference table.
 */
public class RefDataUtil {
	/** Magic constant meaning does not exist. */
	public static final RefData NONE = RefData.newBuilder().build();

	/**
	 * Encode the state of a symbolic reference (e.g. {@code HEAD}).
	 *
	 * @param target
	 *            name of the reference the symref points at.
	 * @return message carrying only the symref field.
	 */
	static RefData symbolic(String target) {
		return RefData.newBuilder().setSymref(target).build();
	}

	/**
	 * Encode state pointing directly at an object.
	 *
	 * @param id
	 *            the target object; chunk hint included when available.
	 * @return message carrying only the target field.
	 */
	static RefData id(AnyObjectId id) {
		return RefData.newBuilder().setTarget(idMessage(id)).build();
	}

	/**
	 * Snapshot the state of an in-memory {@link Ref}.
	 *
	 * @param ref
	 *            the reference to encode.
	 * @return encoded state; {@link #NONE} if the ref has no object id.
	 */
	static RefData fromRef(Ref ref) {
		if (ref.isSymbolic())
			return symbolic(ref.getTarget().getName());

		if (ref.getObjectId() == null)
			return NONE;

		RefData.Builder builder = RefData.newBuilder();
		builder.setTarget(idMessage(ref.getObjectId()));
		if (ref.isPeeled()) {
			builder.setIsPeeled(true);
			if (ref.getPeeledObjectId() != null)
				builder.setPeeled(idMessage(ref.getPeeledObjectId()));
		}
		return builder.build();
	}

	/**
	 * Encode a target whose peeled value is already known.
	 *
	 * @param targetId
	 *            object the reference points at.
	 * @param peeledId
	 *            peeled value of the target; null if the target does not
	 *            peel to another object.
	 * @return encoded state with the is_peeled bit always set.
	 */
	static RefData peeled(ObjectId targetId, ObjectId peeledId) {
		RefData.Builder builder = RefData.newBuilder();
		builder.setTarget(idMessage(targetId));
		builder.setIsPeeled(true);
		if (peeledId != null)
			builder.setPeeled(idMessage(peeledId));
		return builder.build();
	}

	/** Convert an id (plus optional chunk hint) to its message form. */
	private static RefData.Id idMessage(AnyObjectId id) {
		RefData.Id.Builder msg = RefData.Id.newBuilder();
		msg.setObjectName(id.name());
		if (id instanceof IdWithChunk)
			msg.setChunkKey(((IdWithChunk) id).getChunkKey().asString());
		return msg.build();
	}

	/** An ObjectId that also remembers the {@link ChunkKey} it was seen in. */
	static class IdWithChunk extends ObjectId {
		/**
		 * Materialize an id from its message form.
		 *
		 * @param src
		 *            message holding the hex name and optional chunk key.
		 * @return an {@link IdWithChunk} when the chunk hint is present,
		 *         otherwise a plain ObjectId.
		 */
		static ObjectId create(RefData.Id src) {
			ObjectId name = ObjectId.fromString(src.getObjectName());
			if (!src.hasChunkKey())
				return name;
			return new IdWithChunk(name,
					ChunkKey.fromString(src.getChunkKey()));
		}

		// Chunk the object was last observed in; may speed up later reads.
		private final ChunkKey chunkKey;

		IdWithChunk(AnyObjectId id, ChunkKey key) {
			super(id);
			this.chunkKey = key;
		}

		ChunkKey getChunkKey() {
			return chunkKey;
		}

		@Override
		public String toString() {
			return name() + "->" + chunkKey;
		}
	}

	private RefDataUtil() {
		// Utility class, do not create instances.
	}
}

View File

@ -1,755 +0,0 @@
/*
* Copyright (C) 2011, Google Inc.
* and other copyright owners as documented in the project's IP log.
*
* This program and the accompanying materials are made available
* under the terms of the Eclipse Distribution License v1.0 which
* accompanies this distribution, is reproduced below, and is
* available at http://www.eclipse.org/org/documents/edl-v10.php
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* - Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* - Neither the name of the Eclipse Foundation, Inc. nor the
* names of its contributors may be used to endorse or promote
* products derived from this software without specific prior
* written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.eclipse.jgit.storage.dht;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_LENGTH;
import static org.eclipse.jgit.lib.Constants.OBJECT_ID_STRING_LENGTH;
import java.nio.ByteBuffer;
import java.text.MessageFormat;
import org.eclipse.jgit.lib.AnyObjectId;
import org.eclipse.jgit.lib.Constants;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.util.RawParseUtils;
/**
* A tiny implementation of a subset of the Google Protocol Buffers format.
* <p>
* For more information on the network format, see the canonical documentation
* at <a href="http://code.google.com/p/protobuf/">Google Protocol Buffers</a>.
*/
public class TinyProtobuf {
private static final int WIRE_VARINT = 0;
private static final int WIRE_FIXED_64 = 1;
private static final int WIRE_LEN_DELIM = 2;
private static final int WIRE_FIXED_32 = 5;
/**
* Create a new encoder.
*
* @param estimatedSize
* estimated size of the message. If the size is accurate,
* copying of the result can be avoided during
* {@link Encoder#asByteArray()}. If the size is too small, the
* buffer will grow dynamically.
* @return a new encoder.
*/
public static Encoder encode(int estimatedSize) {
return new Encoder(new byte[estimatedSize]);
}
/**
* Create an encoder that estimates size.
*
* @return a new encoder.
*/
public static Encoder size() {
return new Encoder(null);
}
/**
* Decode a buffer.
*
* @param buf
* the buffer to read.
* @return a new decoder.
*/
public static Decoder decode(byte[] buf) {
return decode(buf, 0, buf.length);
}
/**
* Decode a buffer.
*
* @param buf
* the buffer to read.
* @param off
* offset to begin reading from {@code buf}.
* @param len
* total number of bytes to read from {@code buf}.
* @return a new decoder.
*/
public static Decoder decode(byte[] buf, int off, int len) {
return new Decoder(buf, off, len);
}
/** An enumerated value that encodes/decodes as int32. */
public static interface Enum {
/** @return the wire value. */
public int value();
}
/** Decode fields from a binary protocol buffer. */
public static class Decoder {
private final byte[] buf;
private final int end;
private int ptr;
private int field;
private int type;
private int length;
private Decoder(byte[] buf, int off, int len) {
this.buf = buf;
this.ptr = off;
this.end = off + len;
}
/** @return get the field tag number, 0 on end of buffer. */
public int next() {
if (ptr == end)
return 0;
int fieldAndType = varint32();
field = fieldAndType >>> 3;
type = fieldAndType & 7;
return field;
}
/** Skip the current field's value. */
public void skip() {
switch (type) {
case WIRE_VARINT:
varint64();
break;
case WIRE_FIXED_64:
ptr += 8;
break;
case WIRE_LEN_DELIM:
ptr += varint32();
break;
case WIRE_FIXED_32:
ptr += 4;
break;
default:
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufUnsupportedFieldType, Integer
.valueOf(type)));
}
}
/** @return decode the current field as an int32. */
public int int32() {
checkFieldType(WIRE_VARINT);
return varint32();
}
/** @return decode the current field as an int64. */
public long int64() {
checkFieldType(WIRE_VARINT);
return varint64();
}
/**
* @param <T>
* the type of enumeration.
* @param all
* all of the supported values.
* @return decode the current field as an enumerated value.
*/
public <T extends Enum> T intEnum(T[] all) {
checkFieldType(WIRE_VARINT);
int value = varint32();
for (T t : all) {
if (t.value() == value)
return t;
}
throw new IllegalStateException(MessageFormat.format(
DhtText.get().protobufWrongFieldType, Integer
.valueOf(field), Integer.valueOf(type), all[0]
.getClass().getSimpleName()));
}
/** @return decode the current field as a bool. */
public boolean bool() {
checkFieldType(WIRE_VARINT);
int val = varint32();
switch (val) {
case 0:
return false;
case 1:
return true;
default:
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufNotBooleanValue, Integer.valueOf(field),
Integer.valueOf(val)));
}
}
/** @return decode a fixed 64 bit value. */
public long fixed64() {
checkFieldType(WIRE_FIXED_64);
long val = buf[ptr + 0] & 0xff;
val |= ((long) (buf[ptr + 1] & 0xff)) << (1 * 8);
val |= ((long) (buf[ptr + 2] & 0xff)) << (2 * 8);
val |= ((long) (buf[ptr + 3] & 0xff)) << (3 * 8);
val |= ((long) (buf[ptr + 4] & 0xff)) << (4 * 8);
val |= ((long) (buf[ptr + 5] & 0xff)) << (5 * 8);
val |= ((long) (buf[ptr + 6] & 0xff)) << (6 * 8);
val |= ((long) (buf[ptr + 7] & 0xff)) << (7 * 8);
ptr += 8;
return val;
}
/** @return decode the current field as a string. */
public String string() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
String s = RawParseUtils.decode(buf, ptr, ptr + len);
ptr += len;
return s;
}
/** @return decode the current hex string to an ObjectId. */
public ObjectId stringObjectId() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != OBJECT_ID_STRING_LENGTH)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer
.valueOf(OBJECT_ID_STRING_LENGTH), Integer
.valueOf(len)));
ObjectId id = ObjectId.fromString(buf, ptr);
ptr += OBJECT_ID_STRING_LENGTH;
return id;
}
/** @return decode a string from 8 hex digits. */
public int stringHex32() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != 8)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer.valueOf(8), Integer
.valueOf(len)));
int val = KeyUtils.parse32(buf, ptr);
ptr += 8;
return val;
}
/** @return decode the current field as an array of bytes. */
public byte[] bytes() {
checkFieldType(WIRE_LEN_DELIM);
byte[] r = new byte[varint32()];
System.arraycopy(buf, ptr, r, 0, r.length);
ptr += r.length;
return r;
}
		/**
		 * Access the backing array of the current field without copying.
		 * Use with {@link #bytesLength()} and {@link #bytesOffset()} to
		 * locate the field's payload inside this array.
		 *
		 * @return the decoder's underlying buffer.
		 */
		public byte[] bytesArray() {
			return buf;
		}
		/**
		 * Read the current field's length and remember it for
		 * {@link #bytesOffset()}.
		 *
		 * @return length of field, in bytes; call before {@link #bytesOffset}.
		 */
		public int bytesLength() {
			checkFieldType(WIRE_LEN_DELIM);
			// Side effect: stores the length so bytesOffset() can advance.
			length = varint32();
			return length;
		}
		/**
		 * Consume the current field and report where its payload begins.
		 * Must be called after {@link #bytesLength()}, which records the
		 * number of bytes to skip over.
		 *
		 * @return starting offset of the field within {@link #bytesArray()}.
		 */
		public int bytesOffset() {
			int start = ptr;
			ptr += length;
			return start;
		}
/** @return decode the current raw bytes to an ObjectId. */
public ObjectId bytesObjectId() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
if (len != OBJECT_ID_LENGTH)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldLength,
Integer.valueOf(field), Integer
.valueOf(OBJECT_ID_LENGTH), Integer
.valueOf(len)));
ObjectId id = ObjectId.fromRaw(buf, ptr);
ptr += OBJECT_ID_LENGTH;
return id;
}
/** @return decode the current field as a nested message. */
public Decoder message() {
checkFieldType(WIRE_LEN_DELIM);
int len = varint32();
Decoder msg = decode(buf, ptr, len);
ptr += len;
return msg;
}
private int varint32() {
long v = varint64();
if (Integer.MAX_VALUE < v)
throw new IllegalStateException(MessageFormat.format(DhtText
.get().protobufWrongFieldType, Integer.valueOf(field),
"int64", "int32"));
return (int) v;
}
private long varint64() {
int c = buf[ptr++];
long r = c & 0x7f;
int shift = 7;
while ((c & 0x80) != 0) {
c = buf[ptr++];
r |= ((long) (c & 0x7f)) << shift;
shift += 7;
}
return r;
}
		/**
		 * Verify the current field uses the expected wire type.
		 *
		 * @param expected
		 *            one of the WIRE_* constants the caller can decode.
		 */
		private void checkFieldType(int expected) {
			if (type != expected)
				throw new IllegalStateException(MessageFormat.format(DhtText
						.get().protobufWrongFieldType, Integer.valueOf(field),
						Integer.valueOf(type), Integer.valueOf(expected)));
		}
}
/** Encode values into a binary protocol buffer. */
public static class Encoder {
private byte[] buf;
private int ptr;
private Encoder(byte[] buf) {
this.buf = buf;
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store. Must be >= 0.
*/
public void int32(int field, int value) {
int64(field, value);
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if 0.
*/
public void int32IfNotZero(int field, int value) {
int64IfNotZero(field, value);
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if negative.
*/
public void int32IfNotNegative(int field, int value) {
int64IfNotNegative(field, value);
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store. Must be >= 0.
*/
public void int64(int field, long value) {
if (value < 0)
throw new IllegalArgumentException(
DhtText.get().protobufNegativeValuesNotSupported);
field(field, WIRE_VARINT);
varint(value);
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if 0.
*/
public void int64IfNotZero(int field, long value) {
if (0 != value)
int64(field, value);
}
/**
* Encode a variable length positive integer.
*
* @param field
* field tag number.
* @param value
* the value to store; omitted if negative.
*/
public void int64IfNotNegative(int field, long value) {
if (0 <= value)
int64(field, value);
}
/**
* Encode an enumerated value.
*
* @param <T>
* type of the enumerated values.
* @param field
* field tag number.
* @param value
* value to store; if null the field is omitted.
*/
public <T extends Enum> void intEnum(int field, T value) {
if (value != null) {
field(field, WIRE_VARINT);
varint(value.value());
}
}
/**
* Encode a boolean value.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void bool(int field, boolean value) {
field(field, WIRE_VARINT);
varint(value ? 1 : 0);
}
/**
* Encode a boolean value, only if true.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void boolIfTrue(int field, boolean value) {
if (value)
bool(field, value);
}
/**
* Encode a fixed 64 value.
*
* @param field
* field tag number.
* @param value
* the value to store.
*/
public void fixed64(int field, long value) {
field(field, WIRE_FIXED_64);
if (buf != null) {
ensureSpace(8);
buf[ptr + 0] = (byte) value;
value >>>= 8;
buf[ptr + 1] = (byte) value;
value >>>= 8;
buf[ptr + 3] = (byte) value;
value >>>= 8;
buf[ptr + 3] = (byte) value;
value >>>= 8;
buf[ptr + 4] = (byte) value;
value >>>= 8;
buf[ptr + 5] = (byte) value;
value >>>= 8;
buf[ptr + 6] = (byte) value;
value >>>= 8;
buf[ptr + 7] = (byte) value;
}
ptr += 8;
}
/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void bytes(int field, byte[] value) {
if (value != null)
bytes(field, value, 0, value.length);
}
/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void bytes(int field, ByteBuffer value) {
if (value != null) {
if (!value.hasArray())
throw new IllegalArgumentException(DhtText.get().protobufNoArray);
byte[] valBuf = value.array();
int valPtr = value.arrayOffset() + value.position();
int valLen = value.limit() - value.position();
bytes(field, valBuf, valPtr, valLen);
}
}
/**
* Encode a length delimited bytes field.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
* @param off
* position to copy from.
* @param len
* number of bytes to copy.
*/
public void bytes(int field, byte[] value, int off, int len) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(len);
copy(value, off, len);
}
}
/**
* Encode an ObjectId as a bytes (in raw binary format).
*
* @param field
* field tag number.
* @param value
* the value to store, as a raw binary; if null the field is
* omitted.
*/
public void bytes(int field, AnyObjectId value) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(OBJECT_ID_LENGTH);
if (buf != null) {
ensureSpace(OBJECT_ID_LENGTH);
value.copyRawTo(buf, ptr);
}
ptr += OBJECT_ID_LENGTH;
}
}
/**
* Encode an ObjectId as a string (in hex format).
*
* @param field
* field tag number.
* @param value
* the value to store, as a hex string; if null the field is
* omitted.
*/
public void string(int field, AnyObjectId value) {
if (value != null) {
field(field, WIRE_LEN_DELIM);
varint(OBJECT_ID_STRING_LENGTH);
if (buf != null) {
ensureSpace(OBJECT_ID_STRING_LENGTH);
value.copyTo(buf, ptr);
}
ptr += OBJECT_ID_STRING_LENGTH;
}
}
/**
* Encode a plain Java string.
*
* @param field
* field tag number.
* @param value
* the value to store; if null the field is omitted.
*/
public void string(int field, String value) {
if (value != null)
bytes(field, Constants.encode(value));
}
/**
* Encode a row key as a string.
*
* @param field
* field tag number.
* @param key
* the row key to store as a string; if null the field is
* omitted.
*/
public void string(int field, RowKey key) {
if (key != null)
bytes(field, key.asBytes());
}
/**
* Encode an integer as an 8 byte hex string.
*
* @param field
* field tag number.
* @param value
* value to encode.
*/
public void stringHex32(int field, int value) {
field(field, WIRE_LEN_DELIM);
varint(8);
if (buf != null) {
ensureSpace(8);
KeyUtils.format32(buf, ptr, value);
}
ptr += 8;
}
/**
* Encode a nested message.
*
* @param field
* field tag number.
* @param msg
* message to store; if null or empty the field is omitted.
*/
public void message(int field, Encoder msg) {
if (msg != null && msg.ptr > 0)
bytes(field, msg.buf, 0, msg.ptr);
}
private void field(int field, int type) {
varint((field << 3) | type);
}
private void varint(long value) {
if (buf != null) {
if (buf.length - ptr < 10)
ensureSpace(varintSize(value));
do {
byte b = (byte) (value & 0x7f);
value >>>= 7;
if (value != 0)
b |= 0x80;
buf[ptr++] = b;
} while (value != 0);
} else {
ptr += varintSize(value);
}
}
private static int varintSize(long value) {
value >>>= 7;
int need = 1;
for (; value != 0; value >>>= 7)
need++;
return need;
}
private void copy(byte[] src, int off, int cnt) {
if (buf != null) {
ensureSpace(cnt);
System.arraycopy(src, off, buf, ptr, cnt);
}
ptr += cnt;
}
private void ensureSpace(int need) {
if (buf.length - ptr < need) {
byte[] n = new byte[Math.max(ptr + need, buf.length * 2)];
System.arraycopy(buf, 0, n, 0, ptr);
buf = n;
}
}
/** @return size of the protocol buffer message, in bytes. */
public int size() {
return ptr;
}
/** @return the current buffer, as a byte array. */
public byte[] asByteArray() {
if (ptr == buf.length)
return buf;
byte[] r = new byte[ptr];
System.arraycopy(buf, 0, r, 0, ptr);
return r;
}
/** @return the current buffer. */
public ByteBuffer asByteBuffer() {
return ByteBuffer.wrap(buf, 0, ptr);
}
}
	private TinyProtobuf() {
		// Static utility class; don't make instances.
	}
}

View File

@ -44,11 +44,12 @@
package org.eclipse.jgit.storage.dht.spi; package org.eclipse.jgit.storage.dht.spi;
import java.util.Collection; import java.util.Collection;
import java.util.Map;
import java.util.Set; import java.util.Set;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.StreamingCallback; import org.eclipse.jgit.storage.dht.StreamingCallback;
@ -113,7 +114,7 @@ public void get(Context options, Set<ChunkKey> keys,
* results early. * results early.
*/ */
public void getMeta(Context options, Set<ChunkKey> keys, public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback); AsyncCallback<Map<ChunkKey, ChunkMeta>> callback);
/** /**
* Put some (or all) of a single chunk. * Put some (or all) of a single chunk.

View File

@ -46,8 +46,9 @@
import java.util.Map; import java.util.Map;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData; import org.eclipse.jgit.storage.dht.RefDataUtil;
import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.RepositoryKey;
@ -100,8 +101,8 @@ public boolean compareAndRemove(RefKey refKey, RefData oldData)
* @param oldData * @param oldData
* the old data for the reference. The put only occurs if the * the old data for the reference. The put only occurs if the
* value is still equal to {@code oldData}. Use * value is still equal to {@code oldData}. Use
* {@link RefData#NONE} if the reference should not exist and is * {@link RefDataUtil#NONE} if the reference should not exist and
* being created. * is being created.
* @param newData * @param newData
* new value to store. * new value to store.
* @return true if the put was successful; false if the current value does * @return true if the put was successful; false if the current value does

View File

@ -46,7 +46,7 @@
import java.util.Collection; import java.util.Collection;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.storage.dht.CachedPackInfo; import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;

View File

@ -44,29 +44,36 @@
package org.eclipse.jgit.storage.dht.spi.cache; package org.eclipse.jgit.storage.dht.spi.cache;
import static java.util.Collections.singleton; import static java.util.Collections.singleton;
import static java.util.Collections.singletonMap;
import java.io.IOException;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.StreamingCallback; import org.eclipse.jgit.storage.dht.StreamingCallback;
import org.eclipse.jgit.storage.dht.Sync; import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.ChunkTable; import org.eclipse.jgit.storage.dht.spi.ChunkTable;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
import com.google.protobuf.CodedInputStream;
import com.google.protobuf.CodedOutputStream;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.WireFormat;
/** Cache wrapper around ChunkTable. */ /** Cache wrapper around ChunkTable. */
public class CacheChunkTable implements ChunkTable { public class CacheChunkTable implements ChunkTable {
private final ChunkTable db; private final ChunkTable db;
@ -105,7 +112,7 @@ public void get(Context options, Set<ChunkKey> keys,
} }
public void getMeta(Context options, Set<ChunkKey> keys, public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) { AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size()); List<CacheKey> toFind = new ArrayList<CacheKey>(keys.size());
for (ChunkKey k : keys) for (ChunkKey k : keys)
toFind.add(nsMeta.key(k)); toFind.add(nsMeta.key(k));
@ -118,8 +125,10 @@ public void put(PackChunk.Members chunk, WriteBuffer buffer)
db.put(chunk, buf.getWriteBuffer()); db.put(chunk, buf.getWriteBuffer());
// Only store fragmented meta. This is all callers should ask for. // Only store fragmented meta. This is all callers should ask for.
if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) if (chunk.hasMeta() && chunk.getMeta().getFragmentCount() != 0) {
buf.put(nsMeta.key(chunk.getChunkKey()), chunk.getMeta().asBytes()); buf.put(nsMeta.key(chunk.getChunkKey()),
chunk.getMeta().toByteArray());
}
if (chunk.hasChunkData()) if (chunk.hasChunkData())
buf.put(nsChunk.key(chunk.getChunkKey()), encode(chunk)); buf.put(nsChunk.key(chunk.getChunkKey()), encode(chunk));
@ -135,57 +144,99 @@ public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException {
} }
private static byte[] encode(PackChunk.Members members) { private static byte[] encode(PackChunk.Members members) {
final byte[] meta; // Its too slow to encode ByteBuffer through the standard code.
if (members.hasMeta()) // Since the message is only 3 fields, do it by hand.
meta = members.getMeta().asBytes(); ByteBuffer data = members.getChunkDataAsByteBuffer();
else ByteBuffer index = members.getChunkIndexAsByteBuffer();
meta = null; ChunkMeta meta = members.getMeta();
ByteBuffer chunkData = members.getChunkDataAsByteBuffer(); int sz = 0;
ByteBuffer chunkIndex = members.getChunkIndexAsByteBuffer(); if (data != null)
sz += computeByteBufferSize(1, data);
if (index != null)
sz += computeByteBufferSize(2, index);
if (meta != null)
sz += CodedOutputStream.computeMessageSize(3, meta);
TinyProtobuf.Encoder sizer = TinyProtobuf.size(); byte[] r = new byte[sz];
TinyProtobuf.Encoder e = sizer; CodedOutputStream out = CodedOutputStream.newInstance(r);
do { try {
e.bytes(1, chunkData); if (data != null)
e.bytes(2, chunkIndex); writeByteBuffer(out, 1, data);
e.bytes(3, meta); if (index != null)
if (e == sizer) writeByteBuffer(out, 2, index);
e = TinyProtobuf.encode(e.size()); if (meta != null)
else out.writeMessage(3, meta);
return e.asByteArray(); } catch (IOException err) {
} while (true); throw new RuntimeException("Cannot buffer chunk", err);
}
return r;
}
private static int computeByteBufferSize(int fieldNumber, ByteBuffer data) {
int n = data.remaining();
return CodedOutputStream.computeTagSize(fieldNumber)
+ CodedOutputStream.computeRawVarint32Size(n)
+ n;
}
private static void writeByteBuffer(CodedOutputStream out, int fieldNumber,
ByteBuffer data) throws IOException {
byte[] d = data.array();
int p = data.arrayOffset() + data.position();
int n = data.remaining();
out.writeTag(fieldNumber, WireFormat.WIRETYPE_LENGTH_DELIMITED);
out.writeRawVarint32(n);
out.writeRawBytes(d, p, n);
} }
private static PackChunk.Members decode(ChunkKey key, byte[] raw) { private static PackChunk.Members decode(ChunkKey key, byte[] raw) {
PackChunk.Members members = new PackChunk.Members(); PackChunk.Members members = new PackChunk.Members();
members.setChunkKey(key); members.setChunkKey(key);
TinyProtobuf.Decoder d = TinyProtobuf.decode(raw); // Its too slow to convert using the standard code, as copies
for (;;) { // are made. Instead find offsets in the stream and use that.
switch (d.next()) { CodedInputStream in = CodedInputStream.newInstance(raw);
case 0: try {
return members; int tag = in.readTag();
case 1: { for (;;) {
int cnt = d.bytesLength(); switch (WireFormat.getTagFieldNumber(tag)) {
int ptr = d.bytesOffset(); case 0:
byte[] buf = d.bytesArray(); return members;
members.setChunkData(buf, ptr, cnt); case 1: {
continue; int cnt = in.readRawVarint32();
} int ptr = in.getTotalBytesRead();
case 2: { members.setChunkData(raw, ptr, cnt);
int cnt = d.bytesLength(); in.skipRawBytes(cnt);
int ptr = d.bytesOffset(); tag = in.readTag();
byte[] buf = d.bytesArray(); if (WireFormat.getTagFieldNumber(tag) != 2)
members.setChunkIndex(buf, ptr, cnt); continue;
continue; }
} //$FALL-THROUGH$
case 3: case 2: {
members.setMeta(ChunkMeta.fromBytes(key, d.message())); int cnt = in.readRawVarint32();
continue; int ptr = in.getTotalBytesRead();
default: members.setChunkIndex(raw, ptr, cnt);
d.skip(); in.skipRawBytes(cnt);
tag = in.readTag();
if (WireFormat.getTagFieldNumber(tag) != 3)
continue;
}
//$FALL-THROUGH$
case 3: {
int cnt = in.readRawVarint32();
int oldLimit = in.pushLimit(cnt);
members.setMeta(ChunkMeta.parseFrom(in));
in.popLimit(oldLimit);
tag = in.readTag();
continue;
}
default:
in.skipField(tag);
}
} }
} catch (IOException err) {
throw new RuntimeException("Cannot decode chunk", err);
} }
} }
@ -329,41 +380,49 @@ private class MetaFromCache implements
private final Set<ChunkKey> remaining; private final Set<ChunkKey> remaining;
private final AsyncCallback<Collection<ChunkMeta>> normalCallback; private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;
private final StreamingCallback<Collection<ChunkMeta>> streamingCallback; private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;
private final List<ChunkMeta> all; private final Map<ChunkKey, ChunkMeta> all;
MetaFromCache(Context options, Set<ChunkKey> keys, MetaFromCache(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) { AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
this.options = options; this.options = options;
this.remaining = new HashSet<ChunkKey>(keys); this.remaining = new HashSet<ChunkKey>(keys);
this.normalCallback = callback; this.normalCallback = callback;
if (callback instanceof StreamingCallback<?>) { if (callback instanceof StreamingCallback<?>) {
streamingCallback = (StreamingCallback<Collection<ChunkMeta>>) callback; streamingCallback = (StreamingCallback<Map<ChunkKey, ChunkMeta>>) callback;
all = null; all = null;
} else { } else {
streamingCallback = null; streamingCallback = null;
all = new ArrayList<ChunkMeta>(keys.size()); all = new HashMap<ChunkKey, ChunkMeta>();
} }
} }
public void onPartialResult(Map<CacheKey, byte[]> result) { public void onPartialResult(Map<CacheKey, byte[]> result) {
for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) { for (Map.Entry<CacheKey, byte[]> ent : result.entrySet()) {
ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes()); ChunkKey key = ChunkKey.fromBytes(ent.getKey().getBytes());
ChunkMeta meta = ChunkMeta.fromBytes(key, ent.getValue()); ChunkMeta meta;
try {
meta = ChunkMeta.parseFrom(ent.getValue());
} catch (InvalidProtocolBufferException e) {
// Invalid meta message, remove the cell from cache.
client.modify(singleton(Change.remove(ent.getKey())),
Sync.<Void> none());
continue;
}
if (streamingCallback != null) { if (streamingCallback != null) {
streamingCallback.onPartialResult(singleton(meta)); streamingCallback.onPartialResult(singletonMap(key, meta));
synchronized (lock) { synchronized (lock) {
remaining.remove(key); remaining.remove(key);
} }
} else { } else {
synchronized (lock) { synchronized (lock) {
all.add(meta); all.put(key, meta);
remaining.remove(key); remaining.remove(key);
} }
} }
@ -391,31 +450,31 @@ public void onFailure(DhtException error) {
} }
private class MetaFromDatabase implements private class MetaFromDatabase implements
StreamingCallback<Collection<ChunkMeta>> { StreamingCallback<Map<ChunkKey, ChunkMeta>> {
private final Object lock = new Object(); private final Object lock = new Object();
private final List<ChunkMeta> all; private final Map<ChunkKey, ChunkMeta> all;
private final AsyncCallback<Collection<ChunkMeta>> normalCallback; private final AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback;
private final StreamingCallback<Collection<ChunkMeta>> streamingCallback; private final StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback;
MetaFromDatabase(List<ChunkMeta> all, MetaFromDatabase(Map<ChunkKey, ChunkMeta> all,
AsyncCallback<Collection<ChunkMeta>> normalCallback, AsyncCallback<Map<ChunkKey, ChunkMeta>> normalCallback,
StreamingCallback<Collection<ChunkMeta>> streamingCallback) { StreamingCallback<Map<ChunkKey, ChunkMeta>> streamingCallback) {
this.all = all; this.all = all;
this.normalCallback = normalCallback; this.normalCallback = normalCallback;
this.streamingCallback = streamingCallback; this.streamingCallback = streamingCallback;
} }
public void onPartialResult(Collection<ChunkMeta> result) { public void onPartialResult(Map<ChunkKey, ChunkMeta> result) {
final List<ChunkMeta> toPutIntoCache = copy(result); final Map<ChunkKey, ChunkMeta> toPutIntoCache = copy(result);
if (streamingCallback != null) if (streamingCallback != null)
streamingCallback.onPartialResult(result); streamingCallback.onPartialResult(result);
else { else {
synchronized (lock) { synchronized (lock) {
all.addAll(result); all.putAll(result);
} }
} }
@ -425,20 +484,22 @@ public void onPartialResult(Collection<ChunkMeta> result) {
// //
executor.submit(new Runnable() { executor.submit(new Runnable() {
public void run() { public void run() {
for (ChunkMeta meta : toPutIntoCache) { for (Map.Entry<ChunkKey, ChunkMeta> ent
ChunkKey key = meta.getChunkKey(); : toPutIntoCache.entrySet()) {
Change op = Change.put(nsMeta.key(key), meta.asBytes()); ChunkKey key = ent.getKey();
Change op = Change.put(nsMeta.key(key),
ent.getValue().toByteArray());
client.modify(singleton(op), none); client.modify(singleton(op), none);
} }
} }
}); });
} }
private <T> List<T> copy(Collection<T> result) { private <K, V> Map<K, V> copy(Map<K, V> result) {
return new ArrayList<T>(result); return new HashMap<K, V>(result);
} }
public void onSuccess(Collection<ChunkMeta> result) { public void onSuccess(Map<ChunkKey, ChunkMeta> result) {
if (result != null && !result.isEmpty()) if (result != null && !result.isEmpty())
onPartialResult(result); onPartialResult(result);

View File

@ -44,7 +44,9 @@
package org.eclipse.jgit.storage.dht.spi.cache; package org.eclipse.jgit.storage.dht.spi.cache;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection; import java.util.Collection;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
@ -52,6 +54,7 @@
import java.util.Set; import java.util.Set;
import java.util.concurrent.ExecutorService; import java.util.concurrent.ExecutorService;
import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedObjectIndex;
import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
@ -59,12 +62,13 @@
import org.eclipse.jgit.storage.dht.ObjectInfo; import org.eclipse.jgit.storage.dht.ObjectInfo;
import org.eclipse.jgit.storage.dht.StreamingCallback; import org.eclipse.jgit.storage.dht.StreamingCallback;
import org.eclipse.jgit.storage.dht.Sync; import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable; import org.eclipse.jgit.storage.dht.spi.ObjectIndexTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
import com.google.protobuf.InvalidProtocolBufferException;
/** Cache wrapper around ObjectIndexTable. */ /** Cache wrapper around ObjectIndexTable. */
public class CacheObjectIndexTable implements ObjectIndexTable { public class CacheObjectIndexTable implements ObjectIndexTable {
private final ObjectIndexTable db; private final ObjectIndexTable db;
@ -125,58 +129,6 @@ public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)
buf.remove(ns.key(objId)); buf.remove(ns.key(objId));
} }
private static byte[] encode(Collection<ObjectInfo> list) {
TinyProtobuf.Encoder e = TinyProtobuf.encode(128);
for (ObjectInfo info : list) {
TinyProtobuf.Encoder m = TinyProtobuf.encode(128);
m.bytes(1, info.getChunkKey().asBytes());
m.bytes(2, info.asBytes());
m.fixed64(3, info.getTime());
e.message(1, m);
}
return e.asByteArray();
}
private static ObjectInfo decodeItem(TinyProtobuf.Decoder m) {
ChunkKey key = null;
TinyProtobuf.Decoder data = null;
long time = -1;
for (;;) {
switch (m.next()) {
case 0:
return ObjectInfo.fromBytes(key, data, time);
case 1:
key = ChunkKey.fromBytes(m);
continue;
case 2:
data = m.message();
continue;
case 3:
time = m.fixed64();
continue;
default:
m.skip();
}
}
}
private static Collection<ObjectInfo> decode(byte[] raw) {
List<ObjectInfo> res = new ArrayList<ObjectInfo>(1);
TinyProtobuf.Decoder d = TinyProtobuf.decode(raw);
for (;;) {
switch (d.next()) {
case 0:
return res;
case 1:
res.add(decodeItem(d.message()));
continue;
default:
d.skip();
}
}
}
private class LoaderFromCache implements private class LoaderFromCache implements
StreamingCallback<Map<CacheKey, byte[]>> { StreamingCallback<Map<CacheKey, byte[]>> {
private final Object lock = new Object(); private final Object lock = new Object();
@ -217,7 +169,15 @@ public void onPartialResult(Map<CacheKey, byte[]> result) {
for (Map.Entry<CacheKey, byte[]> e : result.entrySet()) { for (Map.Entry<CacheKey, byte[]> e : result.entrySet()) {
ObjectIndexKey objKey; ObjectIndexKey objKey;
Collection<ObjectInfo> list = decode(e.getValue()); Collection<ObjectInfo> list;
try {
list = decode(e.getValue());
} catch (InvalidProtocolBufferException badCell) {
client.modify(
Collections.singleton(Change.remove(e.getKey())),
Sync.<Void> none());
continue;
}
objKey = ObjectIndexKey.fromBytes(e.getKey().getBytes()); objKey = ObjectIndexKey.fromBytes(e.getKey().getBytes());
if (tmp != null) if (tmp != null)
@ -238,6 +198,21 @@ public void onPartialResult(Map<CacheKey, byte[]> result) {
} }
} }
private Collection<ObjectInfo> decode(byte[] value)
throws InvalidProtocolBufferException {
CachedObjectIndex cacheEntry = CachedObjectIndex.parseFrom(value);
int sz = cacheEntry.getItemCount();
ObjectInfo[] r = new ObjectInfo[sz];
for (int i = 0; i < sz; i++) {
CachedObjectIndex.Item item = cacheEntry.getItem(i);
r[i] = new ObjectInfo(
ChunkKey.fromString(item.getChunkKey()),
item.getTime(),
item.getObjectInfo());
}
return Arrays.asList(r);
}
public void onSuccess(Map<CacheKey, byte[]> result) { public void onSuccess(Map<CacheKey, byte[]> result) {
if (result != null && !result.isEmpty()) if (result != null && !result.isEmpty())
onPartialResult(result); onPartialResult(result);
@ -305,6 +280,19 @@ public void run() {
client.modify(ops, Sync.<Void> none()); client.modify(ops, Sync.<Void> none());
} }
private byte[] encode(List<ObjectInfo> items) {
CachedObjectIndex.Builder b;
b = CachedObjectIndex.newBuilder();
for (ObjectInfo info : items) {
CachedObjectIndex.Item.Builder i = b.addItemBuilder();
i.setChunkKey(info.getChunkKey().asString());
i.setObjectInfo(info.getData());
if (0 < info.getTime())
i.setTime(info.getTime());
}
return b.build().toByteArray();
}
}); });
} }

View File

@ -46,8 +46,8 @@
import java.util.Map; import java.util.Map;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData;
import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;

View File

@ -46,24 +46,24 @@
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static java.util.Collections.singleton; import static java.util.Collections.singleton;
import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.storage.dht.CachedPackInfo; import org.eclipse.jgit.generated.storage.dht.proto.GitCache.CachedPackInfoList;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.Sync; import org.eclipse.jgit.storage.dht.Sync;
import org.eclipse.jgit.storage.dht.TinyProtobuf;
import org.eclipse.jgit.storage.dht.spi.RepositoryTable; import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change; import org.eclipse.jgit.storage.dht.spi.cache.CacheService.Change;
import com.google.protobuf.InvalidProtocolBufferException;
/** Cache wrapper around RepositoryTable. */ /** Cache wrapper around RepositoryTable. */
public class CacheRepositoryTable implements RepositoryTable { public class CacheRepositoryTable implements RepositoryTable {
private final RepositoryTable db; private final RepositoryTable db;
@ -126,26 +126,20 @@ public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
byte[] data = result.get(memKey); byte[] data = result.get(memKey);
if (data != null) { if (data != null) {
List<CachedPackInfo> r = new ArrayList<CachedPackInfo>(); try {
TinyProtobuf.Decoder d = TinyProtobuf.decode(data); return CachedPackInfoList.parseFrom(data).getPackList();
for (;;) { } catch (InvalidProtocolBufferException e) {
switch (d.next()) { // Invalidate the cache entry and fall through.
case 0: client.modify(singleton(Change.remove(memKey)), none);
return r;
case 1:
r.add(CachedPackInfo.fromBytes(d.message()));
continue;
default:
d.skip();
}
} }
} }
Collection<CachedPackInfo> r = db.getCachedPacks(repo); Collection<CachedPackInfo> r = db.getCachedPacks(repo);
TinyProtobuf.Encoder e = TinyProtobuf.encode(1024); CachedPackInfoList.Builder list = CachedPackInfoList.newBuilder();
for (CachedPackInfo info : r) list.addAllPack(r);
e.bytes(1, info.asBytes()); client.modify(
client.modify(singleton(Change.put(memKey, e.asByteArray())), none); singleton(Change.put(memKey, list.build().toByteArray())),
none);
return r; return r;
} }

View File

@ -43,21 +43,27 @@
package org.eclipse.jgit.storage.dht.spi.memory; package org.eclipse.jgit.storage.dht.spi.memory;
import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.Set; import java.util.Set;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.ChunkMeta;
import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.ChunkMeta;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.PackChunk; import org.eclipse.jgit.storage.dht.PackChunk;
import org.eclipse.jgit.storage.dht.spi.ChunkTable; import org.eclipse.jgit.storage.dht.spi.ChunkTable;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
import com.google.protobuf.InvalidProtocolBufferException;
final class MemChunkTable implements ChunkTable { final class MemChunkTable implements ChunkTable {
private final MemTable table = new MemTable(); private final MemTable table = new MemTable();
@ -89,8 +95,15 @@ public void get(Context options, Set<ChunkKey> keys,
m.setChunkIndex(cell.getValue()); m.setChunkIndex(cell.getValue());
cell = table.get(row, colMeta.name()); cell = table.get(row, colMeta.name());
if (cell != null) if (cell != null) {
m.setMeta(ChunkMeta.fromBytes(chunk, cell.getValue())); try {
m.setMeta(ChunkMeta.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException err) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidChunkMeta, chunk), err));
return;
}
}
out.add(m); out.add(m);
} }
@ -99,15 +112,21 @@ public void get(Context options, Set<ChunkKey> keys,
} }
public void getMeta(Context options, Set<ChunkKey> keys, public void getMeta(Context options, Set<ChunkKey> keys,
AsyncCallback<Collection<ChunkMeta>> callback) { AsyncCallback<Map<ChunkKey, ChunkMeta>> callback) {
int cnt = keys.size(); Map<ChunkKey, ChunkMeta> out = new HashMap<ChunkKey, ChunkMeta>();
List<ChunkMeta> out = new ArrayList<ChunkMeta>(cnt);
for (ChunkKey chunk : keys) { for (ChunkKey chunk : keys) {
byte[] row = chunk.asBytes(); byte[] row = chunk.asBytes();
MemTable.Cell cell = table.get(row, colMeta.name()); MemTable.Cell cell = table.get(row, colMeta.name());
if (cell != null) if (cell != null) {
out.add(ChunkMeta.fromBytes(chunk, cell.getValue())); try {
out.put(chunk, ChunkMeta.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException err) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidChunkMeta, chunk), err));
return;
}
}
} }
callback.onSuccess(out); callback.onSuccess(out);
@ -124,7 +143,7 @@ public void put(PackChunk.Members chunk, WriteBuffer buffer)
table.put(row, colIndex.name(), chunk.getChunkIndex()); table.put(row, colIndex.name(), chunk.getChunkIndex());
if (chunk.hasMeta()) if (chunk.hasMeta())
table.put(row, colMeta.name(), chunk.getMeta().asBytes()); table.put(row, colMeta.name(), chunk.getMeta().toByteArray());
} }
public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException { public void remove(ChunkKey key, WriteBuffer buffer) throws DhtException {

View File

@ -43,15 +43,18 @@
package org.eclipse.jgit.storage.dht.spi.memory; package org.eclipse.jgit.storage.dht.spi.memory;
import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore;
import org.eclipse.jgit.storage.dht.AsyncCallback; import org.eclipse.jgit.storage.dht.AsyncCallback;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.ObjectIndexKey; import org.eclipse.jgit.storage.dht.ObjectIndexKey;
import org.eclipse.jgit.storage.dht.ObjectInfo; import org.eclipse.jgit.storage.dht.ObjectInfo;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
@ -59,6 +62,8 @@
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
import com.google.protobuf.InvalidProtocolBufferException;
final class MemObjectIndexTable implements ObjectIndexTable { final class MemObjectIndexTable implements ObjectIndexTable {
private final MemTable table = new MemTable(); private final MemTable table = new MemTable();
@ -70,17 +75,25 @@ public void get(Context options, Set<ObjectIndexKey> objects,
for (ObjectIndexKey objId : objects) { for (ObjectIndexKey objId : objects) {
for (MemTable.Cell cell : table.scanFamily(objId.asBytes(), colInfo)) { for (MemTable.Cell cell : table.scanFamily(objId.asBytes(), colInfo)) {
Collection<ObjectInfo> info = out.get(objId); Collection<ObjectInfo> chunks = out.get(objId);
if (info == null) { ChunkKey chunkKey;
info = new ArrayList<ObjectInfo>(4); if (chunks == null) {
out.put(objId, info); chunks = new ArrayList<ObjectInfo>(4);
out.put(objId, chunks);
} }
ChunkKey chunk = ChunkKey.fromBytes( chunkKey = ChunkKey.fromBytes(colInfo.suffix(cell.getName()));
colInfo.suffix(cell.getName())); try {
byte[] value = cell.getValue(); chunks.add(new ObjectInfo(
long time = cell.getTimestamp(); chunkKey,
info.add(ObjectInfo.fromBytes(chunk, value, time)); cell.getTimestamp(),
GitStore.ObjectInfo.parseFrom(cell.getValue())));
} catch (InvalidProtocolBufferException badCell) {
callback.onFailure(new DhtException(MessageFormat.format(
DhtText.get().invalidObjectInfo, objId, chunkKey),
badCell));
return;
}
} }
} }
@ -91,7 +104,7 @@ public void add(ObjectIndexKey objId, ObjectInfo info, WriteBuffer buffer)
throws DhtException { throws DhtException {
ChunkKey chunk = info.getChunkKey(); ChunkKey chunk = info.getChunkKey();
table.put(objId.asBytes(), colInfo.append(chunk.asBytes()), table.put(objId.asBytes(), colInfo.append(chunk.asBytes()),
info.asBytes()); info.getData().toByteArray());
} }
public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer) public void remove(ObjectIndexKey objId, ChunkKey chunk, WriteBuffer buffer)

View File

@ -43,18 +43,23 @@
package org.eclipse.jgit.storage.dht.spi.memory; package org.eclipse.jgit.storage.dht.spi.memory;
import java.text.MessageFormat;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import org.eclipse.jgit.generated.storage.dht.proto.GitStore.RefData;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.RefData; import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.RefDataUtil;
import org.eclipse.jgit.storage.dht.RefKey; import org.eclipse.jgit.storage.dht.RefKey;
import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.Context; import org.eclipse.jgit.storage.dht.spi.Context;
import org.eclipse.jgit.storage.dht.spi.RefTable; import org.eclipse.jgit.storage.dht.spi.RefTable;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
import com.google.protobuf.InvalidProtocolBufferException;
final class MemRefTable implements RefTable { final class MemRefTable implements RefTable {
private final MemTable table = new MemTable(); private final MemTable table = new MemTable();
@ -65,8 +70,12 @@ public Map<RefKey, RefData> getAll(Context options, RepositoryKey repository)
Map<RefKey, RefData> out = new HashMap<RefKey, RefData>(); Map<RefKey, RefData> out = new HashMap<RefKey, RefData>();
for (MemTable.Cell cell : table.scanFamily(repository.asBytes(), colRef)) { for (MemTable.Cell cell : table.scanFamily(repository.asBytes(), colRef)) {
RefKey ref = RefKey.fromBytes(colRef.suffix(cell.getName())); RefKey ref = RefKey.fromBytes(colRef.suffix(cell.getName()));
RefData val = RefData.fromBytes(cell.getValue()); try {
out.put(ref, val); out.put(ref, RefData.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException badCell) {
throw new DhtException(MessageFormat.format(
DhtText.get().invalidRefData, ref), badCell);
}
} }
return out; return out;
} }
@ -77,8 +86,8 @@ public boolean compareAndPut(RefKey refKey, RefData oldData, RefData newData)
return table.compareAndSet( // return table.compareAndSet( //
repo.asBytes(), // repo.asBytes(), //
colRef.append(refKey.asBytes()), // colRef.append(refKey.asBytes()), //
oldData != RefData.NONE ? oldData.asBytes() : null, // oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
newData.asBytes()); newData.toByteArray());
} }
public boolean compareAndRemove(RefKey refKey, RefData oldData) public boolean compareAndRemove(RefKey refKey, RefData oldData)
@ -87,7 +96,7 @@ public boolean compareAndRemove(RefKey refKey, RefData oldData)
return table.compareAndSet( // return table.compareAndSet( //
repo.asBytes(), // repo.asBytes(), //
colRef.append(refKey.asBytes()), // colRef.append(refKey.asBytes()), //
oldData != RefData.NONE ? oldData.asBytes() : null, // oldData != RefDataUtil.NONE ? oldData.toByteArray() : null, //
null); null);
} }
} }

View File

@ -43,22 +43,26 @@
package org.eclipse.jgit.storage.dht.spi.memory; package org.eclipse.jgit.storage.dht.spi.memory;
import java.text.MessageFormat;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection; import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.concurrent.TimeoutException; import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicInteger;
import org.eclipse.jgit.storage.dht.CachedPackInfo; import org.eclipse.jgit.generated.storage.dht.proto.GitStore.CachedPackInfo;
import org.eclipse.jgit.storage.dht.CachedPackKey; import org.eclipse.jgit.storage.dht.CachedPackKey;
import org.eclipse.jgit.storage.dht.ChunkInfo; import org.eclipse.jgit.storage.dht.ChunkInfo;
import org.eclipse.jgit.storage.dht.ChunkKey; import org.eclipse.jgit.storage.dht.ChunkKey;
import org.eclipse.jgit.storage.dht.DhtException; import org.eclipse.jgit.storage.dht.DhtException;
import org.eclipse.jgit.storage.dht.DhtText;
import org.eclipse.jgit.storage.dht.RepositoryKey; import org.eclipse.jgit.storage.dht.RepositoryKey;
import org.eclipse.jgit.storage.dht.spi.RepositoryTable; import org.eclipse.jgit.storage.dht.spi.RepositoryTable;
import org.eclipse.jgit.storage.dht.spi.WriteBuffer; import org.eclipse.jgit.storage.dht.spi.WriteBuffer;
import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher; import org.eclipse.jgit.storage.dht.spi.util.ColumnMatcher;
import com.google.protobuf.InvalidProtocolBufferException;
final class MemRepositoryTable implements RepositoryTable { final class MemRepositoryTable implements RepositoryTable {
private final AtomicInteger nextId = new AtomicInteger(); private final AtomicInteger nextId = new AtomicInteger();
@ -76,7 +80,7 @@ public void put(RepositoryKey repo, ChunkInfo info, WriteBuffer buffer)
throws DhtException { throws DhtException {
table.put(repo.asBytes(), table.put(repo.asBytes(),
colChunkInfo.append(info.getChunkKey().asBytes()), colChunkInfo.append(info.getChunkKey().asBytes()),
info.asBytes()); info.getData().toByteArray());
} }
public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer) public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
@ -87,16 +91,24 @@ public void remove(RepositoryKey repo, ChunkKey chunk, WriteBuffer buffer)
public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo) public Collection<CachedPackInfo> getCachedPacks(RepositoryKey repo)
throws DhtException, TimeoutException { throws DhtException, TimeoutException {
List<CachedPackInfo> out = new ArrayList<CachedPackInfo>(4); List<CachedPackInfo> out = new ArrayList<CachedPackInfo>(4);
for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) for (MemTable.Cell cell : table.scanFamily(repo.asBytes(), colCachedPack)) {
out.add(CachedPackInfo.fromBytes(cell.getValue())); try {
out.add(CachedPackInfo.parseFrom(cell.getValue()));
} catch (InvalidProtocolBufferException e) {
throw new DhtException(MessageFormat.format(
DhtText.get().invalidCachedPackInfo, repo,
CachedPackKey.fromBytes(cell.getName())), e);
}
}
return out; return out;
} }
public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer) public void put(RepositoryKey repo, CachedPackInfo info, WriteBuffer buffer)
throws DhtException { throws DhtException {
CachedPackKey key = CachedPackKey.fromInfo(info);
table.put(repo.asBytes(), table.put(repo.asBytes(),
colCachedPack.append(info.getRowKey().asBytes()), colCachedPack.append(key.asBytes()),
info.asBytes()); info.toByteArray());
} }
public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer) public void remove(RepositoryKey repo, CachedPackKey key, WriteBuffer buffer)

View File

@ -135,6 +135,7 @@
<args4j-version>2.0.12</args4j-version> <args4j-version>2.0.12</args4j-version>
<servlet-api-version>2.5</servlet-api-version> <servlet-api-version>2.5</servlet-api-version>
<jetty-version>7.1.6.v20100715</jetty-version> <jetty-version>7.1.6.v20100715</jetty-version>
<protobuf-version>2.4.0a</protobuf-version>
</properties> </properties>
<build> <build>
@ -348,6 +349,12 @@
<artifactId>jetty-servlet</artifactId> <artifactId>jetty-servlet</artifactId>
<version>${jetty-version}</version> <version>${jetty-version}</version>
</dependency> </dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>${protobuf-version}</version>
</dependency>
</dependencies> </dependencies>
</dependencyManagement> </dependencyManagement>
@ -392,6 +399,7 @@
<modules> <modules>
<module>org.eclipse.jgit</module> <module>org.eclipse.jgit</module>
<module>org.eclipse.jgit.generated.storage.dht.proto</module>
<module>org.eclipse.jgit.storage.dht</module> <module>org.eclipse.jgit.storage.dht</module>
<module>org.eclipse.jgit.ant</module> <module>org.eclipse.jgit.ant</module>
<module>org.eclipse.jgit.ui</module> <module>org.eclipse.jgit.ui</module>