merge(3p/git): Merge git subtree at v2.29.2

This also bumps the stable nixpkgs channel to 20.09 (as of 2020-11-21),
because the git build was broken by an issue in the netrc credentials
helper, which has since been fixed in nixpkgs.

The stable channel is not used for anything other than git, so this
should be fine.

Change-Id: I3575a19dab09e1e9556cf8231d717de9890484fb
Author: Vincent Ambo
Date:   2020-11-21 19:20:35 +01:00
Parent: 082c006c04
Commit: f4609b896f

1485 changed files with 241535 additions and 109418 deletions


@@ -0,0 +1,988 @@
#
# Copyright (c) 2020 Sibi Siddharthan
#
#[[
Instructions on how to use this in Visual Studio:
Open the worktree as a folder. Visual Studio 2019 and later will detect
the CMake configuration automatically and set everything up for you,
ready to build. You can then run the tests in `t/` via a regular Git Bash.
Note: Visual Studio also has the option of opening `CMakeLists.txt`
directly; with this option, however, Visual Studio will not find the
source code, so the `File>Open>Folder...` option is preferred.
Instructions to run CMake manually:
mkdir -p contrib/buildsystems/out
cd contrib/buildsystems/out
cmake ../ -DCMAKE_BUILD_TYPE=Release
This will build the git binaries in the contrib/buildsystems/out
directory (our top-level .gitignore file knows to ignore the contents
of this directory).
Possible build configurations (-DCMAKE_BUILD_TYPE) and their
corresponding compiler flags:
Debug : -g
Release: -O3
RelWithDebInfo : -O2 -g
MinSizeRel : -Os
empty (default) :
NOTE: -DCMAKE_BUILD_TYPE is optional; multi-config generators such as
Visual Studio ignore this option.
By default, this process generates a Makefile (Linux/*BSD/macOS) or a
Visual Studio solution (Windows).
Run `make` to build Git on Linux/*BSD/macOS.
Open git.sln on Windows and build Git.
NOTE: By default CMake uses Makefiles as the build tool on Linux and
Visual Studio on Windows; to use another tool, say `ninja`, add
`-G Ninja` to the command line when configuring.
]]
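# A minimal sketch of the Ninja route mentioned above (assuming ninja is
# installed on your system), run from the top of the worktree:
#   cmake -S contrib/buildsystems -B contrib/buildsystems/out -G Ninja -DCMAKE_BUILD_TYPE=Release
#   cmake --build contrib/buildsystems/out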
cmake_minimum_required(VERSION 3.14)
#set the source directory to root of git
set(CMAKE_SOURCE_DIR ${CMAKE_CURRENT_LIST_DIR}/../..)
if(WIN32)
set(VCPKG_DIR "${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg")
if(MSVC AND NOT EXISTS ${VCPKG_DIR})
message("Initializing vcpkg and building the Git's dependencies (this will take a while...)")
execute_process(COMMAND ${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg_install.bat)
endif()
list(APPEND CMAKE_PREFIX_PATH "${VCPKG_DIR}/installed/x64-windows")
# In the vcpkg edition, we need this to be able to link to libcurl
set(CURL_NO_CURL_CMAKE ON)
endif()
find_program(SH_EXE sh PATHS "C:/Program Files/Git/bin")
if(NOT SH_EXE)
message(FATAL_ERROR "sh: shell interpreter was not found in your path, please install one."
"On Windows, you can get it as part of 'Git for Windows' install at https://gitforwindows.org/")
endif()
#Create GIT-VERSION-FILE using GIT-VERSION-GEN
if(NOT EXISTS ${CMAKE_SOURCE_DIR}/GIT-VERSION-FILE)
message("Generating GIT-VERSION-FILE")
execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/GIT-VERSION-GEN
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR})
endif()
#Parse GIT-VERSION-FILE to get the version
file(STRINGS ${CMAKE_SOURCE_DIR}/GIT-VERSION-FILE git_version REGEX "GIT_VERSION = (.*)")
string(REPLACE "GIT_VERSION = " "" git_version ${git_version})
string(FIND ${git_version} "GIT" location)
if(location EQUAL -1)
string(REGEX MATCH "[0-9]*\\.[0-9]*\\.[0-9]*" git_version ${git_version})
else()
string(REGEX MATCH "[0-9]*\\.[0-9]*" git_version ${git_version})
string(APPEND git_version ".0") #for building from a snapshot
endif()
project(git
VERSION ${git_version}
LANGUAGES C)
#TODO gitk git-gui gitweb
#TODO Enable NLS on windows natively
#TODO Add pcre support
#macros for parsing the Makefile for sources and scripts
macro(parse_makefile_for_sources list_var regex)
file(STRINGS ${CMAKE_SOURCE_DIR}/Makefile ${list_var} REGEX "^${regex} \\+=(.*)")
string(REPLACE "${regex} +=" "" ${list_var} ${${list_var}})
string(REPLACE "$(COMPAT_OBJS)" "" ${list_var} ${${list_var}}) #remove "$(COMPAT_OBJS)" This is only for libgit.
string(STRIP ${${list_var}} ${list_var}) #remove trailing/leading whitespaces
string(REPLACE ".o" ".c;" ${list_var} ${${list_var}}) #change .o to .c, ; is for converting the string into a list
list(TRANSFORM ${list_var} STRIP) #remove trailing/leading whitespaces for each element in list
list(REMOVE_ITEM ${list_var} "") #remove empty list elements
endmacro()
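# Illustrative example (hypothetical Makefile fragment): a line such as
#   LIB_OBJS += abspath.o
# parsed with parse_makefile_for_sources(libgit_SOURCES "LIB_OBJS")
# leaves "abspath.c" as an element of the libgit_SOURCES list.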
macro(parse_makefile_for_scripts list_var regex lang)
file(STRINGS ${CMAKE_SOURCE_DIR}/Makefile ${list_var} REGEX "^${regex} \\+=(.*)")
string(REPLACE "${regex} +=" "" ${list_var} ${${list_var}})
string(STRIP ${${list_var}} ${list_var}) #remove trailing/leading whitespaces
string(REPLACE " " ";" ${list_var} ${${list_var}}) #convert string to a list
if(NOT ${lang}) #exclude for SCRIPT_LIB
list(TRANSFORM ${list_var} REPLACE "${lang}" "") #do the replacement
endif()
endmacro()
include(CheckTypeSize)
include(CheckCSourceRuns)
include(CheckCSourceCompiles)
include(CheckIncludeFile)
include(CheckFunctionExists)
include(CheckSymbolExists)
include(CheckStructHasMember)
include(CTest)
find_package(ZLIB REQUIRED)
find_package(CURL)
find_package(EXPAT)
find_package(Iconv)
#Don't use libintl on Windows Visual Studio and Clang builds
if(NOT (WIN32 AND (CMAKE_C_COMPILER_ID STREQUAL "MSVC" OR CMAKE_C_COMPILER_ID STREQUAL "Clang")))
find_package(Intl)
endif()
if(NOT Intl_FOUND)
add_compile_definitions(NO_GETTEXT)
if(NOT Iconv_FOUND)
add_compile_definitions(NO_ICONV)
endif()
endif()
include_directories(SYSTEM ${ZLIB_INCLUDE_DIRS})
if(CURL_FOUND)
include_directories(SYSTEM ${CURL_INCLUDE_DIRS})
endif()
if(EXPAT_FOUND)
include_directories(SYSTEM ${EXPAT_INCLUDE_DIRS})
endif()
if(Iconv_FOUND)
include_directories(SYSTEM ${Iconv_INCLUDE_DIRS})
endif()
if(Intl_FOUND)
include_directories(SYSTEM ${Intl_INCLUDE_DIRS})
endif()
if(WIN32 AND NOT MSVC)#not required for visual studio builds
find_program(WINDRES_EXE windres)
if(NOT WINDRES_EXE)
message(FATAL_ERROR "Install windres on Windows for resource files")
endif()
endif()
find_program(MSGFMT_EXE msgfmt)
if(NOT MSGFMT_EXE)
set(MSGFMT_EXE ${CMAKE_SOURCE_DIR}/compat/vcbuild/vcpkg/downloads/tools/msys2/msys64/usr/bin/msgfmt.exe)
if(NOT EXISTS ${MSGFMT_EXE})
message(WARNING "Text Translations won't be built")
unset(MSGFMT_EXE)
endif()
endif()
#Force all visual studio outputs to CMAKE_BINARY_DIR
if(CMAKE_C_COMPILER_ID STREQUAL "MSVC")
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_BINARY_DIR})
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_BINARY_DIR})
add_compile_options(/MP)
endif()
#default behaviour
include_directories(${CMAKE_SOURCE_DIR})
add_compile_definitions(GIT_HOST_CPU="${CMAKE_SYSTEM_PROCESSOR}")
add_compile_definitions(SHA256_BLK INTERNAL_QSORT RUNTIME_PREFIX)
add_compile_definitions(NO_OPENSSL SHA1_DC SHA1DC_NO_STANDARD_INCLUDES
SHA1DC_INIT_SAFE_HASH_DEFAULT=0
SHA1DC_CUSTOM_INCLUDE_SHA1_C="cache.h"
SHA1DC_CUSTOM_INCLUDE_UBC_CHECK_C="git-compat-util.h" )
list(APPEND compat_SOURCES sha1dc_git.c sha1dc/sha1.c sha1dc/ubc_check.c block-sha1/sha1.c sha256/block/sha256.c compat/qsort_s.c)
add_compile_definitions(PAGER_ENV="LESS=FRX LV=-c"
ETC_GITATTRIBUTES="etc/gitattributes"
ETC_GITCONFIG="etc/gitconfig"
GIT_EXEC_PATH="libexec/git-core"
GIT_LOCALE_PATH="share/locale"
GIT_MAN_PATH="share/man"
GIT_INFO_PATH="share/info"
GIT_HTML_PATH="share/doc/git-doc"
DEFAULT_HELP_FORMAT="html"
DEFAULT_GIT_TEMPLATE_DIR="share/git-core/templates"
GIT_VERSION="${PROJECT_VERSION}.GIT"
GIT_USER_AGENT="git/${PROJECT_VERSION}.GIT"
BINDIR="bin"
GIT_BUILT_FROM_COMMIT="")
if(WIN32)
set(FALLBACK_RUNTIME_PREFIX /mingw64)
add_compile_definitions(FALLBACK_RUNTIME_PREFIX="${FALLBACK_RUNTIME_PREFIX}")
else()
set(FALLBACK_RUNTIME_PREFIX /home/$ENV{USER})
add_compile_definitions(FALLBACK_RUNTIME_PREFIX="${FALLBACK_RUNTIME_PREFIX}")
endif()
#Platform Specific
if(CMAKE_SYSTEM_NAME STREQUAL "Windows")
if(CMAKE_C_COMPILER_ID STREQUAL "MSVC" OR CMAKE_C_COMPILER_ID STREQUAL "Clang")
include_directories(${CMAKE_SOURCE_DIR}/compat/vcbuild/include)
add_compile_definitions(_CRT_SECURE_NO_WARNINGS _CRT_NONSTDC_NO_DEPRECATE)
endif()
include_directories(${CMAKE_SOURCE_DIR}/compat/win32)
add_compile_definitions(HAVE_ALLOCA_H NO_POSIX_GOODIES NATIVE_CRLF NO_UNIX_SOCKETS WIN32
_CONSOLE DETECT_MSYS_TTY STRIP_EXTENSION=".exe" NO_SYMLINK_HEAD UNRELIABLE_FSTAT
NOGDI OBJECT_CREATION_MODE=1 __USE_MINGW_ANSI_STDIO=0
USE_NED_ALLOCATOR OVERRIDE_STRDUP MMAP_PREVENTS_DELETE USE_WIN32_MMAP
UNICODE _UNICODE HAVE_WPGMPTR ENSURE_MSYSTEM_IS_SET)
list(APPEND compat_SOURCES compat/mingw.c compat/winansi.c compat/win32/path-utils.c
compat/win32/pthread.c compat/win32mmap.c compat/win32/syslog.c
compat/win32/trace2_win32_process_info.c compat/win32/dirent.c
compat/nedmalloc/nedmalloc.c compat/strdup.c)
set(NO_UNIX_SOCKETS 1)
elseif(CMAKE_SYSTEM_NAME STREQUAL "Linux")
add_compile_definitions(PROCFS_EXECUTABLE_PATH="/proc/self/exe" HAVE_DEV_TTY )
list(APPEND compat_SOURCES unix-socket.c)
endif()
set(EXE_EXTENSION ${CMAKE_EXECUTABLE_SUFFIX})
#header checks
check_include_file(libgen.h HAVE_LIBGEN_H)
if(NOT HAVE_LIBGEN_H)
add_compile_definitions(NO_LIBGEN_H)
list(APPEND compat_SOURCES compat/basename.c)
endif()
check_include_file(sys/sysinfo.h HAVE_SYSINFO)
if(HAVE_SYSINFO)
add_compile_definitions(HAVE_SYSINFO)
endif()
check_c_source_compiles("
#include <alloca.h>
int main(void)
{
char *p = (char *) alloca(2 * sizeof(int));
if (p)
return 0;
return 0;
}"
HAVE_ALLOCA_H)
if(HAVE_ALLOCA_H)
add_compile_definitions(HAVE_ALLOCA_H)
endif()
check_include_file(strings.h HAVE_STRINGS_H)
if(HAVE_STRINGS_H)
add_compile_definitions(HAVE_STRINGS_H)
endif()
check_include_file(sys/select.h HAVE_SYS_SELECT_H)
if(NOT HAVE_SYS_SELECT_H)
add_compile_definitions(NO_SYS_SELECT_H)
endif()
check_include_file(sys/poll.h HAVE_SYS_POLL_H)
if(NOT HAVE_SYS_POLL_H)
add_compile_definitions(NO_SYS_POLL_H)
endif()
check_include_file(poll.h HAVE_POLL_H)
if(NOT HAVE_POLL_H)
add_compile_definitions(NO_POLL_H)
endif()
check_include_file(inttypes.h HAVE_INTTYPES_H)
if(NOT HAVE_INTTYPES_H)
add_compile_definitions(NO_INTTYPES_H)
endif()
check_include_file(paths.h HAVE_PATHS_H)
if(HAVE_PATHS_H)
add_compile_definitions(HAVE_PATHS_H)
endif()
#function checks
set(function_checks
strcasestr memmem strlcpy strtoimax strtoumax strtoull
setenv mkdtemp poll pread memmem)
#unsetenv and hstrerror are incompatible with the Windows build
if(NOT WIN32)
list(APPEND function_checks unsetenv hstrerror)
endif()
foreach(f ${function_checks})
string(TOUPPER ${f} uf)
check_function_exists(${f} HAVE_${uf})
if(NOT HAVE_${uf})
add_compile_definitions(NO_${uf})
endif()
endforeach()
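# e.g. a platform lacking strcasestr gets NO_STRCASESTR defined here and
# picks up the compat/strcasestr.c fallback appended below.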
if(NOT HAVE_POLL_H OR NOT HAVE_SYS_POLL_H OR NOT HAVE_POLL)
include_directories(${CMAKE_SOURCE_DIR}/compat/poll)
add_compile_definitions(NO_POLL)
list(APPEND compat_SOURCES compat/poll/poll.c)
endif()
if(NOT HAVE_STRCASESTR)
list(APPEND compat_SOURCES compat/strcasestr.c)
endif()
if(NOT HAVE_STRLCPY)
list(APPEND compat_SOURCES compat/strlcpy.c)
endif()
if(NOT HAVE_STRTOUMAX)
list(APPEND compat_SOURCES compat/strtoumax.c compat/strtoimax.c)
endif()
if(NOT HAVE_SETENV)
list(APPEND compat_SOURCES compat/setenv.c)
endif()
if(NOT HAVE_MKDTEMP)
list(APPEND compat_SOURCES compat/mkdtemp.c)
endif()
if(NOT HAVE_PREAD)
list(APPEND compat_SOURCES compat/pread.c)
endif()
if(NOT HAVE_MEMMEM)
list(APPEND compat_SOURCES compat/memmem.c)
endif()
if(NOT WIN32)
if(NOT HAVE_UNSETENV)
list(APPEND compat_SOURCES compat/unsetenv.c)
endif()
if(NOT HAVE_HSTRERROR)
list(APPEND compat_SOURCES compat/hstrerror.c)
endif()
endif()
check_function_exists(getdelim HAVE_GETDELIM)
if(HAVE_GETDELIM)
add_compile_definitions(HAVE_GETDELIM)
endif()
check_function_exists(clock_gettime HAVE_CLOCK_GETTIME)
check_symbol_exists(CLOCK_MONOTONIC "time.h" HAVE_CLOCK_MONOTONIC)
if(HAVE_CLOCK_GETTIME)
add_compile_definitions(HAVE_CLOCK_GETTIME)
endif()
if(HAVE_CLOCK_MONOTONIC)
add_compile_definitions(HAVE_CLOCK_MONOTONIC)
endif()
#check for st_blocks in struct stat
check_struct_has_member("struct stat" st_blocks "sys/stat.h" STRUCT_STAT_HAS_ST_BLOCKS)
if(NOT STRUCT_STAT_HAS_ST_BLOCKS)
add_compile_definitions(NO_ST_BLOCKS_IN_STRUCT_STAT)
endif()
#compile checks
check_c_source_runs("
#include<stdio.h>
#include<stdarg.h>
#include<string.h>
#include<stdlib.h>
int test_vsnprintf(char *str, size_t maxsize, const char *format, ...)
{
int ret;
va_list ap;
va_start(ap, format);
ret = vsnprintf(str, maxsize, format, ap);
va_end(ap);
return ret;
}
int main(void)
{
char buf[6];
if (test_vsnprintf(buf, 3, \"%s\", \"12345\") != 5
|| strcmp(buf, \"12\"))
return 1;
if (snprintf(buf, 3, \"%s\", \"12345\") != 5
|| strcmp(buf, \"12\"))
return 1;
return 0;
}"
SNPRINTF_OK)
if(NOT SNPRINTF_OK)
add_compile_definitions(SNPRINTF_RETURNS_BOGUS)
list(APPEND compat_SOURCES compat/snprintf.c)
endif()
check_c_source_runs("
#include<stdio.h>
int main(void)
{
FILE *f = fopen(\".\", \"r\");
return f != NULL;
}"
FREAD_READS_DIRECTORIES_NO)
if(NOT FREAD_READS_DIRECTORIES_NO)
add_compile_definitions(FREAD_READS_DIRECTORIES)
list(APPEND compat_SOURCES compat/fopen.c)
endif()
check_c_source_compiles("
#include <regex.h>
#ifndef REG_STARTEND
#error oops we dont have it
#endif
int main(void)
{
return 0;
}"
HAVE_REGEX)
if(NOT HAVE_REGEX)
include_directories(${CMAKE_SOURCE_DIR}/compat/regex)
list(APPEND compat_SOURCES compat/regex/regex.c )
add_compile_definitions(NO_REGEX NO_MBSUPPORT GAWK)
endif()
check_c_source_compiles("
#include <stddef.h>
#include <sys/types.h>
#include <sys/sysctl.h>
int main(void)
{
int val, mib[2];
size_t len;
mib[0] = CTL_HW;
mib[1] = 1;
len = sizeof(val);
return sysctl(mib, 2, &val, &len, NULL, 0) ? 1 : 0;
}"
HAVE_BSD_SYSCTL)
if(HAVE_BSD_SYSCTL)
add_compile_definitions(HAVE_BSD_SYSCTL)
endif()
set(CMAKE_REQUIRED_LIBRARIES ${Iconv_LIBRARIES})
set(CMAKE_REQUIRED_INCLUDES ${Iconv_INCLUDE_DIRS})
check_c_source_compiles("
#include <iconv.h>
extern size_t iconv(iconv_t cd,
char **inbuf, size_t *inbytesleft,
char **outbuf, size_t *outbytesleft);
int main(void)
{
return 0;
}"
HAVE_NEW_ICONV)
if(HAVE_NEW_ICONV)
set(HAVE_OLD_ICONV 0)
else()
set(HAVE_OLD_ICONV 1)
endif()
check_c_source_runs("
#include <iconv.h>
#if ${HAVE_OLD_ICONV}
typedef const char *iconv_ibp;
#else
typedef char *iconv_ibp;
#endif
int main(void)
{
int v;
iconv_t conv;
char in[] = \"a\";
iconv_ibp pin = in;
char out[20] = \"\";
char *pout = out;
size_t isz = sizeof(in);
size_t osz = sizeof(out);
conv = iconv_open(\"UTF-16\", \"UTF-8\");
iconv(conv, &pin, &isz, &pout, &osz);
iconv_close(conv);
v = (unsigned char)(out[0]) + (unsigned char)(out[1]);
return v != 0xfe + 0xff;
}"
ICONV_DOESNOT_OMIT_BOM)
if(NOT ICONV_DOESNOT_OMIT_BOM)
add_compile_definitions(ICONV_OMITS_BOM)
endif()
unset(CMAKE_REQUIRED_LIBRARIES)
unset(CMAKE_REQUIRED_INCLUDES)
#programs
set(PROGRAMS_BUILT
git git-daemon git-http-backend git-sh-i18n--envsubst
git-shell)
if(NOT CURL_FOUND)
list(APPEND excluded_progs git-http-fetch git-http-push)
add_compile_definitions(NO_CURL)
message(WARNING "git-http-push and git-http-fetch will not be built")
else()
list(APPEND PROGRAMS_BUILT git-http-fetch git-http-push git-imap-send git-remote-http)
if(CURL_VERSION_STRING VERSION_GREATER_EQUAL 7.34.0)
add_compile_definitions(USE_CURL_FOR_IMAP_SEND)
endif()
endif()
if(NOT EXPAT_FOUND)
list(APPEND excluded_progs git-http-push)
add_compile_definitions(NO_EXPAT)
else()
list(APPEND PROGRAMS_BUILT git-http-push)
if(EXPAT_VERSION_STRING VERSION_LESS_EQUAL 1.2)
add_compile_definitions(EXPAT_NEEDS_XMLPARSE_H)
endif()
endif()
list(REMOVE_DUPLICATES excluded_progs)
list(REMOVE_DUPLICATES PROGRAMS_BUILT)
foreach(p ${excluded_progs})
list(APPEND EXCLUSION_PROGS --exclude-program ${p} )
endforeach()
#append a sentinel so the cache comparison below also works when the list is empty
list(APPEND EXCLUSION_PROGS empty)
set(EXCLUSION_PROGS_CACHE ${EXCLUSION_PROGS} CACHE STRING "Programs not built" FORCE)
if(NOT EXISTS ${CMAKE_BINARY_DIR}/command-list.h OR NOT EXCLUSION_PROGS_CACHE STREQUAL EXCLUSION_PROGS)
list(REMOVE_ITEM EXCLUSION_PROGS empty)
message("Generating command-list.h")
execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/generate-cmdlist.sh ${EXCLUSION_PROGS} command-list.txt
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_FILE ${CMAKE_BINARY_DIR}/command-list.h)
endif()
if(NOT EXISTS ${CMAKE_BINARY_DIR}/config-list.h)
message("Generating config-list.h")
execute_process(COMMAND ${SH_EXE} ${CMAKE_SOURCE_DIR}/generate-configlist.sh
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
OUTPUT_FILE ${CMAKE_BINARY_DIR}/config-list.h)
endif()
include_directories(${CMAKE_BINARY_DIR})
#build
#libgit
parse_makefile_for_sources(libgit_SOURCES "LIB_OBJS")
list(TRANSFORM libgit_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/")
list(TRANSFORM compat_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/")
add_library(libgit ${libgit_SOURCES} ${compat_SOURCES})
#libxdiff
parse_makefile_for_sources(libxdiff_SOURCES "XDIFF_OBJS")
list(TRANSFORM libxdiff_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/")
add_library(xdiff STATIC ${libxdiff_SOURCES})
if(WIN32)
if(NOT MSVC)#use windres when compiling with gcc and clang
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/git.res
COMMAND ${WINDRES_EXE} -O coff -DMAJOR=${PROJECT_VERSION_MAJOR} -DMINOR=${PROJECT_VERSION_MINOR}
-DMICRO=${PROJECT_VERSION_PATCH} -DPATCHLEVEL=0 -DGIT_VERSION="\\\"${PROJECT_VERSION}.GIT\\\""
-i ${CMAKE_SOURCE_DIR}/git.rc -o ${CMAKE_BINARY_DIR}/git.res
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
VERBATIM)
else()#MSVC use rc
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/git.res
COMMAND ${CMAKE_RC_COMPILER} /d MAJOR=${PROJECT_VERSION_MAJOR} /d MINOR=${PROJECT_VERSION_MINOR}
/d MICRO=${PROJECT_VERSION_PATCH} /d PATCHLEVEL=0 /d GIT_VERSION="${PROJECT_VERSION}.GIT"
/fo ${CMAKE_BINARY_DIR}/git.res ${CMAKE_SOURCE_DIR}/git.rc
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}
VERBATIM)
endif()
add_custom_target(git-rc DEPENDS ${CMAKE_BINARY_DIR}/git.res)
endif()
#link all required libraries to common-main
add_library(common-main OBJECT ${CMAKE_SOURCE_DIR}/common-main.c)
target_link_libraries(common-main libgit xdiff ${ZLIB_LIBRARIES})
if(Intl_FOUND)
target_link_libraries(common-main ${Intl_LIBRARIES})
endif()
if(Iconv_FOUND)
target_link_libraries(common-main ${Iconv_LIBRARIES})
endif()
if(WIN32)
target_link_libraries(common-main ws2_32 ntdll ${CMAKE_BINARY_DIR}/git.res)
add_dependencies(common-main git-rc)
if(CMAKE_C_COMPILER_ID STREQUAL "GNU")
target_link_options(common-main PUBLIC -municode -Wl,--nxcompat -Wl,--dynamicbase -Wl,--pic-executable,-e,mainCRTStartup)
elseif(CMAKE_C_COMPILER_ID STREQUAL "Clang")
target_link_options(common-main PUBLIC -municode -Wl,-nxcompat -Wl,-dynamicbase -Wl,-entry:wmainCRTStartup -Wl,invalidcontinue.obj)
elseif(CMAKE_C_COMPILER_ID STREQUAL "MSVC")
target_link_options(common-main PUBLIC /IGNORE:4217 /IGNORE:4049 /NOLOGO /ENTRY:wmainCRTStartup /SUBSYSTEM:CONSOLE invalidcontinue.obj)
else()
message(FATAL_ERROR "Unhandled compiler: ${CMAKE_C_COMPILER_ID}")
endif()
elseif(UNIX)
target_link_libraries(common-main pthread rt)
endif()
#git
parse_makefile_for_sources(git_SOURCES "BUILTIN_OBJS")
list(TRANSFORM git_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/")
add_executable(git ${CMAKE_SOURCE_DIR}/git.c ${git_SOURCES})
target_link_libraries(git common-main)
add_executable(git-daemon ${CMAKE_SOURCE_DIR}/daemon.c)
target_link_libraries(git-daemon common-main)
add_executable(git-http-backend ${CMAKE_SOURCE_DIR}/http-backend.c)
target_link_libraries(git-http-backend common-main)
add_executable(git-sh-i18n--envsubst ${CMAKE_SOURCE_DIR}/sh-i18n--envsubst.c)
target_link_libraries(git-sh-i18n--envsubst common-main)
add_executable(git-shell ${CMAKE_SOURCE_DIR}/shell.c)
target_link_libraries(git-shell common-main)
if(CURL_FOUND)
add_library(http_obj OBJECT ${CMAKE_SOURCE_DIR}/http.c)
add_executable(git-imap-send ${CMAKE_SOURCE_DIR}/imap-send.c)
target_link_libraries(git-imap-send http_obj common-main ${CURL_LIBRARIES})
add_executable(git-http-fetch ${CMAKE_SOURCE_DIR}/http-walker.c ${CMAKE_SOURCE_DIR}/http-fetch.c)
target_link_libraries(git-http-fetch http_obj common-main ${CURL_LIBRARIES})
add_executable(git-remote-http ${CMAKE_SOURCE_DIR}/http-walker.c ${CMAKE_SOURCE_DIR}/remote-curl.c)
target_link_libraries(git-remote-http http_obj common-main ${CURL_LIBRARIES} )
if(EXPAT_FOUND)
add_executable(git-http-push ${CMAKE_SOURCE_DIR}/http-push.c)
target_link_libraries(git-http-push http_obj common-main ${CURL_LIBRARIES} ${EXPAT_LIBRARIES})
endif()
endif()
set(git_builtin_extra
cherry cherry-pick format-patch fsck-objects
init merge-subtree restore show
stage status switch whatchanged)
#Creating hardlinks
foreach(s ${git_SOURCES} ${git_builtin_extra})
string(REPLACE "${CMAKE_SOURCE_DIR}/builtin/" "" s ${s})
string(REPLACE ".c" "" s ${s})
file(APPEND ${CMAKE_BINARY_DIR}/CreateLinks.cmake "file(CREATE_LINK git${EXE_EXTENSION} git-${s}${EXE_EXTENSION})\n")
list(APPEND git_links ${CMAKE_BINARY_DIR}/git-${s}${EXE_EXTENSION})
endforeach()
if(CURL_FOUND)
set(remote_exes
git-remote-https git-remote-ftp git-remote-ftps)
foreach(s ${remote_exes})
file(APPEND ${CMAKE_BINARY_DIR}/CreateLinks.cmake "file(CREATE_LINK git-remote-http${EXE_EXTENSION} ${s}${EXE_EXTENSION})\n")
list(APPEND git_http_links ${CMAKE_BINARY_DIR}/${s}${EXE_EXTENSION})
endforeach()
endif()
add_custom_command(OUTPUT ${git_links} ${git_http_links}
COMMAND ${CMAKE_COMMAND} -P ${CMAKE_BINARY_DIR}/CreateLinks.cmake
DEPENDS git git-remote-http)
add_custom_target(git-links ALL DEPENDS ${git_links} ${git_http_links})
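# The accumulated CreateLinks.cmake script runs at build time via
# `cmake -P`, so the hardlinks are (re)created whenever git or
# git-remote-http is rebuilt.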
#creating required scripts
set(SHELL_PATH /bin/sh)
set(PERL_PATH /usr/bin/perl)
set(LOCALEDIR ${FALLBACK_RUNTIME_PREFIX}/share/locale)
set(GITWEBDIR ${FALLBACK_RUNTIME_PREFIX}/share/locale)
set(INSTLIBDIR ${FALLBACK_RUNTIME_PREFIX}/share/perl5)
#shell scripts
parse_makefile_for_scripts(git_sh_scripts "SCRIPT_SH" ".sh")
parse_makefile_for_scripts(git_shlib_scripts "SCRIPT_LIB" "")
set(git_shell_scripts
${git_sh_scripts} ${git_shlib_scripts} git-instaweb)
foreach(script ${git_shell_scripts})
file(STRINGS ${CMAKE_SOURCE_DIR}/${script}.sh content NEWLINE_CONSUME)
string(REPLACE "@SHELL_PATH@" "${SHELL_PATH}" content "${content}")
string(REPLACE "@@DIFF@@" "diff" content "${content}")
string(REPLACE "@LOCALEDIR@" "${LOCALEDIR}" content "${content}")
string(REPLACE "@GITWEBDIR@" "${GITWEBDIR}" content "${content}")
string(REPLACE "@@NO_CURL@@" "" content "${content}")
string(REPLACE "@@USE_GETTEXT_SCHEME@@" "" content "${content}")
string(REPLACE "# @@BROKEN_PATH_FIX@@" "" content "${content}")
string(REPLACE "@@PERL@@" "${PERL_PATH}" content "${content}")
string(REPLACE "@@SANE_TEXT_GREP@@" "-a" content "${content}")
string(REPLACE "@@PAGER_ENV@@" "LESS=FRX LV=-c" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/${script} ${content})
endforeach()
#perl scripts
parse_makefile_for_scripts(git_perl_scripts "SCRIPT_PERL" ".perl")
#create perl header
file(STRINGS ${CMAKE_SOURCE_DIR}/perl/header_templates/fixed_prefix.template.pl perl_header )
string(REPLACE "@@PATHSEP@@" ":" perl_header "${perl_header}")
string(REPLACE "@@INSTLIBDIR@@" "${INSTLIBDIR}" perl_header "${perl_header}")
foreach(script ${git_perl_scripts})
file(STRINGS ${CMAKE_SOURCE_DIR}/${script}.perl content NEWLINE_CONSUME)
string(REPLACE "#!/usr/bin/perl" "#!/usr/bin/perl\n${perl_header}\n" content "${content}")
string(REPLACE "@@GIT_VERSION@@" "${PROJECT_VERSION}" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/${script} ${content})
endforeach()
#python script
file(STRINGS ${CMAKE_SOURCE_DIR}/git-p4.py content NEWLINE_CONSUME)
string(REPLACE "#!/usr/bin/env python" "#!/usr/bin/python" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/git-p4 ${content})
#perl modules
file(GLOB_RECURSE perl_modules "${CMAKE_SOURCE_DIR}/perl/*.pm")
foreach(pm ${perl_modules})
string(REPLACE "${CMAKE_SOURCE_DIR}/perl/" "" file_path ${pm})
file(STRINGS ${pm} content NEWLINE_CONSUME)
string(REPLACE "@@LOCALEDIR@@" "${LOCALEDIR}" content "${content}")
string(REPLACE "@@NO_PERL_CPAN_FALLBACKS@@" "" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/perl/build/lib/${file_path} ${content})
#test-lib.sh requires perl/build/lib to be the build directory of perl modules
endforeach()
#templates
file(GLOB templates "${CMAKE_SOURCE_DIR}/templates/*")
list(TRANSFORM templates REPLACE "${CMAKE_SOURCE_DIR}/templates/" "")
list(REMOVE_ITEM templates ".gitignore")
list(REMOVE_ITEM templates "Makefile")
list(REMOVE_ITEM templates "blt")# Prevents an error when reconfiguring for in source builds
list(REMOVE_ITEM templates "branches--")
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/templates/blt/branches) #create branches
#templates have @.*@ replacement so use configure_file instead
foreach(tm ${templates})
string(REPLACE "--" "/" blt_tm ${tm})
string(REPLACE "this" "" blt_tm ${blt_tm})# for this--
configure_file(${CMAKE_SOURCE_DIR}/templates/${tm} ${CMAKE_BINARY_DIR}/templates/blt/${blt_tm} @ONLY)
endforeach()
#translations
if(MSGFMT_EXE)
file(GLOB po_files "${CMAKE_SOURCE_DIR}/po/*.po")
list(TRANSFORM po_files REPLACE "${CMAKE_SOURCE_DIR}/po/" "")
list(TRANSFORM po_files REPLACE ".po" "")
foreach(po ${po_files})
file(MAKE_DIRECTORY ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES)
add_custom_command(OUTPUT ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo
COMMAND ${MSGFMT_EXE} --check --statistics -o ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo ${CMAKE_SOURCE_DIR}/po/${po}.po)
list(APPEND po_gen ${CMAKE_BINARY_DIR}/po/build/locale/${po}/LC_MESSAGES/git.mo)
endforeach()
add_custom_target(po-gen ALL DEPENDS ${po_gen})
endif()
#to help with the install
list(TRANSFORM git_shell_scripts PREPEND "${CMAKE_BINARY_DIR}/")
list(TRANSFORM git_perl_scripts PREPEND "${CMAKE_BINARY_DIR}/")
#install
install(TARGETS git git-shell
RUNTIME DESTINATION bin)
install(PROGRAMS ${CMAKE_BINARY_DIR}/git-cvsserver
DESTINATION bin)
list(REMOVE_ITEM PROGRAMS_BUILT git git-shell)
install(TARGETS ${PROGRAMS_BUILT}
RUNTIME DESTINATION libexec/git-core)
set(bin_links
git-receive-pack git-upload-archive git-upload-pack)
foreach(b ${bin_links})
install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/bin/${b}${EXE_EXTENSION})")
endforeach()
install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git${EXE_EXTENSION})")
install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git-shell${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git-shell${EXE_EXTENSION})")
foreach(b ${git_links})
string(REPLACE "${CMAKE_BINARY_DIR}" "" b ${b})
install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/bin/git${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/${b}${EXE_EXTENSION})")
endforeach()
foreach(b ${git_http_links})
string(REPLACE "${CMAKE_BINARY_DIR}" "" b ${b})
install(CODE "file(CREATE_LINK ${CMAKE_INSTALL_PREFIX}/libexec/git-core/git-remote-http${EXE_EXTENSION} ${CMAKE_INSTALL_PREFIX}/libexec/git-core/${b}${EXE_EXTENSION})")
endforeach()
install(PROGRAMS ${git_shell_scripts} ${git_perl_scripts} ${CMAKE_BINARY_DIR}/git-p4
DESTINATION libexec/git-core)
install(DIRECTORY ${CMAKE_SOURCE_DIR}/mergetools DESTINATION libexec/git-core)
install(DIRECTORY ${CMAKE_BINARY_DIR}/perl/build/lib/ DESTINATION share/perl5
FILES_MATCHING PATTERN "*.pm")
install(DIRECTORY ${CMAKE_BINARY_DIR}/templates/blt/ DESTINATION share/git-core/templates)
if(MSGFMT_EXE)
install(DIRECTORY ${CMAKE_BINARY_DIR}/po/build/locale DESTINATION share)
endif()
if(BUILD_TESTING)
#tests-helpers
add_executable(test-fake-ssh ${CMAKE_SOURCE_DIR}/t/helper/test-fake-ssh.c)
target_link_libraries(test-fake-ssh common-main)
#test-tool
parse_makefile_for_sources(test-tool_SOURCES "TEST_BUILTINS_OBJS")
list(TRANSFORM test-tool_SOURCES PREPEND "${CMAKE_SOURCE_DIR}/t/helper/")
add_executable(test-tool ${CMAKE_SOURCE_DIR}/t/helper/test-tool.c ${test-tool_SOURCES})
target_link_libraries(test-tool common-main)
set_target_properties(test-fake-ssh test-tool
PROPERTIES RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/t/helper)
if(MSVC)
set_target_properties(test-fake-ssh test-tool
PROPERTIES RUNTIME_OUTPUT_DIRECTORY_DEBUG ${CMAKE_BINARY_DIR}/t/helper)
set_target_properties(test-fake-ssh test-tool
PROPERTIES RUNTIME_OUTPUT_DIRECTORY_RELEASE ${CMAKE_BINARY_DIR}/t/helper)
endif()
#wrapper scripts
set(wrapper_scripts
git git-upload-pack git-receive-pack git-upload-archive git-shell git-remote-ext)
set(wrapper_test_scripts
test-fake-ssh test-tool)
foreach(script ${wrapper_scripts})
file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME)
string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}")
string(REPLACE "@@PROG@@" "${script}${EXE_EXTENSION}" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/bin-wrappers/${script} ${content})
endforeach()
foreach(script ${wrapper_test_scripts})
file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME)
string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}")
string(REPLACE "@@PROG@@" "t/helper/${script}${EXE_EXTENSION}" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/bin-wrappers/${script} ${content})
endforeach()
file(STRINGS ${CMAKE_SOURCE_DIR}/wrap-for-bin.sh content NEWLINE_CONSUME)
string(REPLACE "@@BUILD_DIR@@" "${CMAKE_BINARY_DIR}" content "${content}")
string(REPLACE "@@PROG@@" "git-cvsserver" content "${content}")
file(WRITE ${CMAKE_BINARY_DIR}/bin-wrappers/git-cvsserver ${content})
#options for configuring the tests
option(PERL_TESTS "Perform tests that use perl" ON)
option(PYTHON_TESTS "Perform tests that use python" ON)
#GIT-BUILD-OPTIONS
set(TEST_SHELL_PATH ${SHELL_PATH})
set(DIFF diff)
set(PYTHON_PATH /usr/bin/python)
set(TAR tar)
set(NO_CURL )
set(NO_EXPAT )
set(USE_LIBPCRE1 )
set(USE_LIBPCRE2 )
set(NO_LIBPCRE1_JIT )
set(NO_PERL )
set(NO_PTHREADS )
set(NO_PYTHON )
set(PAGER_ENV "LESS=FRX LV=-c")
set(DC_SHA1 YesPlease)
set(RUNTIME_PREFIX true)
set(NO_GETTEXT )
if(NOT CURL_FOUND)
set(NO_CURL 1)
endif()
if(NOT EXPAT_FOUND)
set(NO_EXPAT 1)
endif()
if(NOT Intl_FOUND)
set(NO_GETTEXT 1)
endif()
if(NOT PERL_TESTS)
set(NO_PERL 1)
endif()
if(NOT PYTHON_TESTS)
set(NO_PYTHON 1)
endif()
file(WRITE ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "SHELL_PATH='${SHELL_PATH}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "TEST_SHELL_PATH='${TEST_SHELL_PATH}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PERL_PATH='${PERL_PATH}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "DIFF='${DIFF}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PYTHON_PATH='${PYTHON_PATH}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "TAR='${TAR}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_CURL='${NO_CURL}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_EXPAT='${NO_EXPAT}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "USE_LIBPCRE1='${USE_LIBPCRE1}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_LIBPCRE1_JIT='${NO_LIBPCRE1_JIT}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PERL='${NO_PERL}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PTHREADS='${NO_PTHREADS}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_UNIX_SOCKETS='${NO_UNIX_SOCKETS}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PAGER_ENV='${PAGER_ENV}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "DC_SHA1='${DC_SHA1}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "X='${EXE_EXTENSION}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_GETTEXT='${NO_GETTEXT}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "RUNTIME_PREFIX='${RUNTIME_PREFIX}'\n")
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "NO_PYTHON='${NO_PYTHON}'\n")
if(WIN32)
file(APPEND ${CMAKE_BINARY_DIR}/GIT-BUILD-OPTIONS "PATH=\"$PATH:$TEST_DIRECTORY/../compat/vcbuild/vcpkg/installed/x64-windows/bin\"\n")
endif()
#Make the tests work when building out of the source tree
get_filename_component(CACHE_PATH ${CMAKE_CURRENT_LIST_DIR}/../../CMakeCache.txt ABSOLUTE)
if(NOT ${CMAKE_BINARY_DIR}/CMakeCache.txt STREQUAL ${CACHE_PATH})
file(RELATIVE_PATH BUILD_DIR_RELATIVE ${CMAKE_SOURCE_DIR} ${CMAKE_BINARY_DIR}/CMakeCache.txt)
string(REPLACE "/CMakeCache.txt" "" BUILD_DIR_RELATIVE ${BUILD_DIR_RELATIVE})
#Setting the build directory in test-lib.sh before running tests
file(WRITE ${CMAKE_BINARY_DIR}/CTestCustom.cmake
"file(STRINGS ${CMAKE_SOURCE_DIR}/t/test-lib.sh GIT_BUILD_DIR_REPL REGEX \"GIT_BUILD_DIR=(.*)\")\n"
"file(STRINGS ${CMAKE_SOURCE_DIR}/t/test-lib.sh content NEWLINE_CONSUME)\n"
"string(REPLACE \"\${GIT_BUILD_DIR_REPL}\" \"GIT_BUILD_DIR=\\\"$TEST_DIRECTORY/../${BUILD_DIR_RELATIVE}\\\"\" content \"\${content}\")\n"
"file(WRITE ${CMAKE_SOURCE_DIR}/t/test-lib.sh \${content})")
#misc copies
file(COPY ${CMAKE_SOURCE_DIR}/t/chainlint.sed DESTINATION ${CMAKE_BINARY_DIR}/t/)
file(COPY ${CMAKE_SOURCE_DIR}/po/is.po DESTINATION ${CMAKE_BINARY_DIR}/po/)
file(COPY ${CMAKE_SOURCE_DIR}/mergetools/tkdiff DESTINATION ${CMAKE_BINARY_DIR}/mergetools/)
file(COPY ${CMAKE_SOURCE_DIR}/contrib/completion/git-prompt.sh DESTINATION ${CMAKE_BINARY_DIR}/contrib/completion/)
file(COPY ${CMAKE_SOURCE_DIR}/contrib/completion/git-completion.bash DESTINATION ${CMAKE_BINARY_DIR}/contrib/completion/)
endif()
file(GLOB test_scripts "${CMAKE_SOURCE_DIR}/t/t[0-9]*.sh")
#test
foreach(tsh ${test_scripts})
add_test(NAME ${tsh}
COMMAND ${SH_EXE} ${tsh}
WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/t)
endforeach()
endif()#BUILD_TESTING
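
Once configured, the test suite registered above via add_test() can be
driven with CTest; a minimal sketch (assuming a configured build in
contrib/buildsystems/out):

    cd contrib/buildsystems/out
    ctest -j4 --output-on-failure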


@@ -79,7 +79,9 @@ sub createProject {
if (!$static_library) {
$libs_release = join(";", sort(grep /^(?!libgit\.lib|xdiff\/lib\.lib|vcs-svn\/lib\.lib)/, @{$$build_structure{"$prefix${name}_LIBS"}}));
$libs_debug = $libs_release;
$libs_debug =~ s/zlib\.lib/zlibd\.lib/;
$libs_debug =~ s/zlib\.lib/zlibd\.lib/g;
$libs_debug =~ s/libexpat\.lib/libexpatd\.lib/g;
$libs_debug =~ s/libcurl\.lib/libcurl-d\.lib/g;
}
$defines =~ s/-D//g;
@@ -119,13 +121,13 @@ sub createProject {
<VCPKGArch Condition="'\$(Platform)'=='Win32'">x86-windows</VCPKGArch>
<VCPKGArch Condition="'\$(Platform)'!='Win32'">x64-windows</VCPKGArch>
<VCPKGArchDirectory>$cdup\\compat\\vcbuild\\vcpkg\\installed\\\$(VCPKGArch)</VCPKGArchDirectory>
<VCPKGBinDirectory Condition="'\(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
<VCPKGLibDirectory Condition="'\(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
<VCPKGBinDirectory Condition="'\(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
<VCPKGLibDirectory Condition="'\(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
<VCPKGBinDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\bin</VCPKGBinDirectory>
<VCPKGLibDirectory Condition="'\$(Configuration)'=='Debug'">\$(VCPKGArchDirectory)\\debug\\lib</VCPKGLibDirectory>
<VCPKGBinDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\bin</VCPKGBinDirectory>
<VCPKGLibDirectory Condition="'\$(Configuration)'!='Debug'">\$(VCPKGArchDirectory)\\lib</VCPKGLibDirectory>
<VCPKGIncludeDirectory>\$(VCPKGArchDirectory)\\include</VCPKGIncludeDirectory>
<VCPKGLibs Condition="'\(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
<VCPKGLibs Condition="'\(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
<VCPKGLibs Condition="'\$(Configuration)'=='Debug'">$libs_debug</VCPKGLibs>
<VCPKGLibs Condition="'\$(Configuration)'!='Debug'">$libs_release</VCPKGLibs>
</PropertyGroup>
<Import Project="\$(VCTargetsPath)\\Microsoft.Cpp.Default.props" />
<PropertyGroup Condition="'\$(Configuration)'=='Debug'" Label="Configuration">
@@ -277,6 +279,9 @@ EOM
if ($target eq 'git') {
print F " <Import Project=\"LinkOrCopyBuiltins.targets\" />\n";
}
if ($target eq 'git-remote-http') {
print F " <Import Project=\"LinkOrCopyRemoteHttp.targets\" />\n";
}
print F << "EOM";
</Project>
EOM


@@ -343,13 +343,13 @@ sub handleLinkLine
} elsif ("$part" eq "-lz") {
push(@libs, "zlib.lib");
} elsif ("$part" eq "-lcrypto") {
push(@libs, "libeay32.lib");
push(@libs, "libcrypto.lib");
} elsif ("$part" eq "-lssl") {
push(@libs, "ssleay32.lib");
push(@libs, "libssl.lib");
} elsif ("$part" eq "-lcurl") {
push(@libs, "libcurl.lib");
} elsif ("$part" eq "-lexpat") {
push(@libs, "expat.lib");
push(@libs, "libexpat.lib");
} elsif ("$part" eq "-liconv") {
push(@libs, "libiconv.lib");
} elsif ($part =~ /^[-\/]/) {


@@ -20,7 +20,7 @@ expression s;
+ set_commit_tree(c, s)
...>}
// These excluded functions must access c->maybe_tree direcly.
// These excluded functions must access c->maybe_tree directly.
// Note that if c->maybe_tree is written somewhere outside of these
// functions, then the recommended transformation will be bogus with
// repo_get_commit_tree() on the LHS.
@@ -32,3 +32,21 @@ expression c;
- c->maybe_tree
+ repo_get_commit_tree(specify_the_right_repo_here, c)
...>}
@@
struct commit *c;
expression E;
@@
(
- c->generation = E;
+ commit_graph_data_at(c)->generation = E;
|
- c->graph_pos = E;
+ commit_graph_data_at(c)->graph_pos = E;
|
- c->generation
+ commit_graph_generation(c)
|
- c->graph_pos
+ commit_graph_position(c)
)


@@ -0,0 +1,16 @@
@ hashmap_entry_init_usage @
expression E;
struct hashmap_entry HME;
@@
- HME.hash = E;
+ hashmap_entry_init(&HME, E);
@@
identifier f !~ "^hashmap_entry_init$";
expression E;
struct hashmap_entry *HMEP;
@@
f(...) {<...
- HMEP->hash = E;
+ hashmap_entry_init(HMEP, E);
...>}


@@ -10,38 +10,6 @@ struct object_id *OIDPTR;
- is_null_sha1(OIDPTR->hash)
+ is_null_oid(OIDPTR)
@@
struct object_id OID;
@@
- sha1_to_hex(OID.hash)
+ oid_to_hex(&OID)
@@
identifier f != oid_to_hex;
struct object_id *OIDPTR;
@@
f(...) {<...
- sha1_to_hex(OIDPTR->hash)
+ oid_to_hex(OIDPTR)
...>}
@@
expression E;
struct object_id OID;
@@
- sha1_to_hex_r(E, OID.hash)
+ oid_to_hex_r(E, &OID)
@@
identifier f != oid_to_hex_r;
expression E;
struct object_id *OIDPTR;
@@
f(...) {<...
- sha1_to_hex_r(E, OIDPTR->hash)
+ oid_to_hex_r(E, OIDPTR)
...>}
@@
struct object_id OID;
@@

[File diff suppressed because it is too large]


@@ -11,8 +11,9 @@
#
# zstyle ':completion:*:*:git:*' script ~/.git-completion.zsh
#
# The recommended way to install this script is to copy to '~/.zsh/_git', and
# then add the following to your ~/.zshrc file:
# The recommended way to install this script is to make a copy of it in
# ~/.zsh/ directory as ~/.zsh/git-completion.zsh and then add the following
# to your ~/.zshrc file:
#
# fpath=(~/.zsh $fpath)
@@ -149,9 +150,11 @@ __git_zsh_cmd_common ()
push:'update remote refs along with associated objects'
rebase:'forward-port local commits to the updated upstream head'
reset:'reset current HEAD to the specified state'
restore:'restore working tree files'
rm:'remove files from the working tree and from the index'
show:'show various types of objects'
status:'show the working tree status'
switch:'switch branches'
tag:'create, list, delete or verify a tag object signed with GPG')
_describe -t common-commands 'common commands' list && _ret=0
}


@@ -70,6 +70,15 @@
# state symbols by setting GIT_PS1_STATESEPARATOR. The default separator
# is SP.
#
# When there is an in-progress operation such as a merge, rebase,
# revert, cherry-pick, or bisect, the prompt will include information
# related to the operation, often in the form "|<OPERATION-NAME>".
#
# When the repository has a sparse-checkout, a notification of the form
# "|SPARSE" will be included in the prompt. This can be shortened to a
# single '?' character by setting GIT_PS1_COMPRESSSPARSESTATE, or omitted
# by setting GIT_PS1_OMITSPARSESTATE.
#
# By default, __git_ps1 will compare HEAD to your SVN upstream if it can
# find one, or @{upstream} otherwise. Once you have set
# GIT_PS1_SHOWUPSTREAM, you can override it on a per-repository basis by
@@ -421,6 +430,13 @@ __git_ps1 ()
return $exit
fi
local sparse=""
if [ -z "${GIT_PS1_COMPRESSSPARSESTATE}" ] &&
[ -z "${GIT_PS1_OMITSPARSESTATE}" ] &&
[ "$(git config --bool core.sparseCheckout)" = "true" ]; then
sparse="|SPARSE"
fi
local r=""
local b=""
local step=""
@@ -429,11 +445,7 @@ __git_ps1 ()
__git_eread "$g/rebase-merge/head-name" b
__git_eread "$g/rebase-merge/msgnum" step
__git_eread "$g/rebase-merge/end" total
if [ -f "$g/rebase-merge/interactive" ]; then
r="|REBASE-i"
else
r="|REBASE-m"
fi
r="|REBASE"
else
if [ -d "$g/rebase-apply" ]; then
__git_eread "$g/rebase-apply/next" step
@@ -496,6 +508,7 @@ __git_ps1 ()
local i=""
local s=""
local u=""
local h=""
local c=""
local p=""
@@ -528,6 +541,11 @@ __git_ps1 ()
u="%${ZSH_VERSION+%}"
fi
if [ -n "${GIT_PS1_COMPRESSSPARSESTATE}" ] &&
[ "$(git config --bool core.sparseCheckout)" = "true" ]; then
h="?"
fi
if [ -n "${GIT_PS1_SHOWUPSTREAM-}" ]; then
__git_ps1_show_upstream
fi
@@ -546,8 +564,8 @@ __git_ps1 ()
b="\${__git_ps1_branch_name}"
fi
local f="$w$i$s$u"
local gitstring="$c$b${f:+$z$f}$r$p"
local f="$h$w$i$s$u"
local gitstring="$c$b${f:+$z$f}${sparse}$r$p"
if [ $pcmode = yes ]; then
if [ "${__git_printf_supports_v-}" != yes ]; then

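The sparse-checkout indicator added above can be tried with the usual
__git_ps1 setup; a minimal sketch (bash, assuming the script has been
saved as ~/.git-prompt.sh):

    source ~/.git-prompt.sh
    GIT_PS1_COMPRESSSPARSESTATE=1
    PS1='\w$(__git_ps1 " (%s)")\$ '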

@@ -0,0 +1 @@
git-credential-netrc


@@ -1,8 +1,30 @@
# The default target of this Makefile is...
all::
test:
SCRIPT_PERL = git-credential-netrc.perl
GIT_ROOT_DIR = ../../..
HERE = contrib/credential/netrc
SCRIPT_PERL_FULL = $(patsubst %,$(HERE)/%,$(SCRIPT_PERL))
all:: build
build:
$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
build-perl-script
install: build
$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
install-perl-script
clean:
$(MAKE) -C $(GIT_ROOT_DIR) SCRIPT_PERL="$(SCRIPT_PERL_FULL)" \
clean-perl-script
test: build
./t-git-credential-netrc.sh
testverbose:
testverbose: build
./t-git-credential-netrc.sh -d -v
.PHONY: all build install clean test testverbose
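
With these targets in place, the helper can be built and exercised from
its own directory; a sketch (run from contrib/credential/netrc in a git
checkout):

    make build
    make test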


@@ -423,7 +423,7 @@ sub load_config {
# load settings from git config
my $options = shift;
# set from command argument, gpg.program option, or default to gpg
$options->{'gpg'} //= Git->repository()->config('gpg.program')
$options->{'gpg'} //= Git::config('gpg.program')
// 'gpg';
log_verbose("using $options{'gpg'} for GPG operations");
}


@@ -72,7 +72,7 @@ sub handle_line {
(?:$COLOR?\|$COLOR?[ ])* # zero or more trailing "|"
[ ]* # trailing whitespace for merges
/x) {
my $graph_prefix = $&;
my $graph_prefix = $&;
# We must flush before setting graph indent, since the
# new commit may be indented differently from what we
@@ -112,7 +112,7 @@ sub handle_line {
# Since we can receive arbitrary input, there's no optimal
# place to flush. Flushing on a blank line is a heuristic that
# happens to match git-log output.
if (!length) {
if (/^$/) {
$flush_cb->();
}
}


@@ -139,6 +139,8 @@ foreach my $tar_file (@ARGV)
print FI "\n";
}
next if ($typeflag eq 'g'); # ignore global header
my $path;
if ($prefix) {
$path = "$prefix/$name";


@@ -42,7 +42,7 @@ hgnewcsets = 0
def usage():
print """\
print("""\
%s: [OPTIONS] <hgprj>
options:
@@ -54,7 +54,7 @@ options:
required:
hgprj: name of the HG project to import (directory)
""" % sys.argv[0]
""" % sys.argv[0])
#------------------------------------------------------------------------------
@@ -104,22 +104,22 @@ os.chdir(hgprj)
if state:
if os.path.exists(state):
if verbose:
print 'State does exist, reading'
print('State does exist, reading')
f = open(state, 'r')
hgvers = pickle.load(f)
else:
print 'State does not exist, first run'
print('State does not exist, first run')
sock = os.popen('hg tip --template "{rev}"')
tip = sock.read()
if sock.close():
sys.exit(1)
if verbose:
print 'tip is', tip
print('tip is', tip)
# Calculate the branches
if verbose:
print 'analysing the branches...'
print('analysing the branches...')
hgchildren["0"] = ()
hgparents["0"] = (None, None)
hgbranch["0"] = "master"
@@ -154,15 +154,15 @@ for cset in range(1, int(tip) + 1):
else:
hgbranch[str(cset)] = "branch-" + str(cset)
if not hgvers.has_key("0"):
print 'creating repository'
if "0" not in hgvers:
print('creating repository')
os.system('git init')
# loop through every hg changeset
for cset in range(int(tip) + 1):
# incremental, already seen
if hgvers.has_key(str(cset)):
if str(cset) in hgvers:
continue
hgnewcsets += 1
@@ -180,27 +180,27 @@ for cset in range(int(tip) + 1):
os.write(fdcomment, csetcomment)
os.close(fdcomment)
print '-----------------------------------------'
print 'cset:', cset
print 'branch:', hgbranch[str(cset)]
print 'user:', user
print 'date:', date
print 'comment:', csetcomment
print('-----------------------------------------')
print('cset:', cset)
print('branch:', hgbranch[str(cset)])
print('user:', user)
print('date:', date)
print('comment:', csetcomment)
if parent:
print 'parent:', parent
print('parent:', parent)
if mparent:
print 'mparent:', mparent
print('mparent:', mparent)
if tag:
print 'tag:', tag
print '-----------------------------------------'
print('tag:', tag)
print('-----------------------------------------')
# checkout the parent if necessary
if cset != 0:
if hgbranch[str(cset)] == "branch-" + str(cset):
print 'creating new branch', hgbranch[str(cset)]
print('creating new branch', hgbranch[str(cset)])
os.system('git checkout -b %s %s' % (hgbranch[str(cset)], hgvers[parent]))
else:
print 'checking out branch', hgbranch[str(cset)]
print('checking out branch', hgbranch[str(cset)])
os.system('git checkout %s' % hgbranch[str(cset)])
# merge
@@ -209,7 +209,7 @@ for cset in range(int(tip) + 1):
otherbranch = hgbranch[mparent]
else:
otherbranch = hgbranch[parent]
print 'merging', otherbranch, 'into', hgbranch[str(cset)]
print('merging', otherbranch, 'into', hgbranch[str(cset)])
os.system(getgitenv(user, date) + 'git merge --no-commit -s ours "" %s %s' % (hgbranch[str(cset)], otherbranch))
# remove everything except .git and .hg directories
@@ -233,12 +233,12 @@ for cset in range(int(tip) + 1):
# delete branch if not used anymore...
if mparent and len(hgchildren[str(cset)]):
print "Deleting unused branch:", otherbranch
print("Deleting unused branch:", otherbranch)
os.system('git branch -d %s' % otherbranch)
# retrieve and record the version
vvv = os.popen('git show --quiet --pretty=format:%H').read()
print 'record', cset, '->', vvv
print('record', cset, '->', vvv)
hgvers[str(cset)] = vvv
if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
@@ -247,7 +247,7 @@ if hgnewcsets >= opt_nrepack and opt_nrepack != -1:
# write the state for incrementals
if state:
if verbose:
print 'Writing state'
print('Writing state')
f = open(state, 'w')
pickle.dump(hgvers, f)


@@ -95,7 +95,7 @@ if PYTHON3:
unicode = str
def write_str(f, msg):
# Try outputing with the default encoding. If it fails,
# Try outputting with the default encoding. If it fails,
# try UTF-8.
try:
f.buffer.write(msg.encode(sys.getdefaultencoding()))
@@ -2129,7 +2129,7 @@ class SMTPMailer(Mailer):
# equivalent to
# self.smtp.ehlo()
# self.smtp.starttls()
# with acces to the ssl layer
# with access to the ssl layer
self.smtp.ehlo()
if not self.smtp.has_extn("starttls"):
raise smtplib.SMTPException("STARTTLS extension not supported by server")
@@ -2148,7 +2148,7 @@ class SMTPMailer(Mailer):
cert_reqs=ssl.CERT_NONE
)
self.environment.get_logger().error(
'*** Warning, the server certificat is not verified (smtp) ***\n'
'*** Warning, the server certificate is not verified (smtp) ***\n'
'*** set the option smtpCACerts ***\n'
)
if not hasattr(self.smtp.sock, "read"):
@@ -3189,7 +3189,7 @@ class ProjectdescEnvironmentMixin(Environment):
self.COMPUTED_KEYS += ['projectdesc']
def get_projectdesc(self):
"""Return a one-line descripition of the project."""
"""Return a one-line description of the project."""
git_dir = get_git_dir()
try:


@@ -56,7 +56,7 @@ config = git_multimail.Config('multimailhook')
# Set some Git configuration variables. Equivalent to passing var=val
# to "git -c var=val" each time git is called, or to adding the
# configuration in .git/config (must come before instanciating the
# configuration in .git/config (must come before instantiating the
# environment) :
#git_multimail.Config.add_config_parameters('multimailhook.commitEmailFormat=html')
#git_multimail.Config.add_config_parameters(('user.name=foo', 'user.email=foo@example.com'))


@@ -329,7 +329,7 @@ generate_update_branch_email()
#
# git rev-parse --not --all | grep -v $(git rev-parse $refname)
#
# Get's us to something pretty safe (apart from the small time
# Gets us to something pretty safe (apart from the small time
# between refname being read, and git rev-parse running - for that,
# I give up)
#


@@ -49,7 +49,7 @@ opcode.
Repository sections are matched on the basename of the repository
(after removing the .git suffix).
The opcode abbrevations are:
The opcode abbreviations are:
C: create new ref
D: delete existing ref


@@ -14,7 +14,7 @@
# This rule states that each system call should have its return value checked
# The problem is that it includes the print call. Checking every print call's
# return value would be harmful to the code readabilty.
# return value would be harmful to the code readability.
# This configuration keeps all default function but print.
[InputOutput::RequireCheckedSyscalls]
functions = open say close


@@ -6,7 +6,7 @@
# License: GPL v2 or later
# Set of tools for git repo with a mediawiki remote.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
use strict;
use warnings;


@@ -9,7 +9,7 @@
# License: GPL v2 or later
# Gateway between Git and MediaWiki.
# Documentation & bugtracker: https://github.com/moy/Git-Mediawiki/
# Documentation & bugtracker: https://github.com/Git-Mediawiki/Git-Mediawiki
use strict;
use MediaWiki::API;
@@ -56,38 +56,38 @@ my $url = $ARGV[1];
# Accept both space-separated and multiple keys in config file.
# Spaces should be written as _ anyway because we'll use chomp.
my @tracked_pages = split(/[ \n]/, run_git("config --get-all remote.${remotename}.pages"));
my @tracked_pages = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.pages"]));
chomp(@tracked_pages);
# Just like @tracked_pages, but for MediaWiki categories.
my @tracked_categories = split(/[ \n]/, run_git("config --get-all remote.${remotename}.categories"));
my @tracked_categories = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.categories"]));
chomp(@tracked_categories);
# Just like @tracked_categories, but for MediaWiki namespaces.
my @tracked_namespaces = split(/[ \n]/, run_git("config --get-all remote.${remotename}.namespaces"));
my @tracked_namespaces = split(/[ \n]/, run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaces"]));
for (@tracked_namespaces) { s/_/ /g; }
chomp(@tracked_namespaces);
# Import media files on pull
my $import_media = run_git("config --get --bool remote.${remotename}.mediaimport");
my $import_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaimport"]);
chomp($import_media);
$import_media = ($import_media eq 'true');
# Export media files on push
my $export_media = run_git("config --get --bool remote.${remotename}.mediaexport");
my $export_media = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.mediaexport"]);
chomp($export_media);
$export_media = !($export_media eq 'false');
my $wiki_login = run_git("config --get remote.${remotename}.mwLogin");
# Note: mwPassword is discourraged. Use the credential system instead.
my $wiki_passwd = run_git("config --get remote.${remotename}.mwPassword");
my $wiki_domain = run_git("config --get remote.${remotename}.mwDomain");
my $wiki_login = run_git_quoted(["config", "--get", "remote.${remotename}.mwLogin"]);
# Note: mwPassword is discouraged. Use the credential system instead.
my $wiki_passwd = run_git_quoted(["config", "--get", "remote.${remotename}.mwPassword"]);
my $wiki_domain = run_git_quoted(["config", "--get", "remote.${remotename}.mwDomain"]);
chomp($wiki_login);
chomp($wiki_passwd);
chomp($wiki_domain);
# Import only last revisions (both for clone and fetch)
my $shallow_import = run_git("config --get --bool remote.${remotename}.shallow");
my $shallow_import = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.shallow"]);
chomp($shallow_import);
$shallow_import = ($shallow_import eq 'true');
@@ -97,9 +97,9 @@ $shallow_import = ($shallow_import eq 'true');
# Possible values:
# - by_rev: perform one query per new revision on the remote wiki
# - by_page: query each tracked page for new revision
my $fetch_strategy = run_git("config --get remote.${remotename}.fetchStrategy");
my $fetch_strategy = run_git_quoted(["config", "--get", "remote.${remotename}.fetchStrategy"]);
if (!$fetch_strategy) {
$fetch_strategy = run_git('config --get mediawiki.fetchStrategy');
$fetch_strategy = run_git_quoted(["config", "--get", "mediawiki.fetchStrategy"]);
}
chomp($fetch_strategy);
if (!$fetch_strategy) {
@@ -123,9 +123,9 @@ my %basetimestamps;
# will get the history with information lost). If the import is
# deterministic, this means everybody gets the same sha1 for each
# MediaWiki revision.
my $dumb_push = run_git("config --get --bool remote.${remotename}.dumbPush");
my $dumb_push = run_git_quoted(["config", "--get", "--bool", "remote.${remotename}.dumbPush"]);
if (!$dumb_push) {
$dumb_push = run_git('config --get --bool mediawiki.dumbPush');
$dumb_push = run_git_quoted(["config", "--get", "--bool", "mediawiki.dumbPush"]);
}
chomp($dumb_push);
$dumb_push = ($dumb_push eq 'true');
@@ -369,12 +369,14 @@ sub get_mw_pages {
return %pages;
}
# usage: $out = run_git("command args");
# $out = run_git("command args", "raw"); # don't interpret output as UTF-8.
sub run_git {
# usage: $out = run_git_quoted(["command", "args", ...]);
# $out = run_git_quoted(["command", "args", ...], "raw"); # don't interpret output as UTF-8.
# $out = run_git_quoted_nostderr(["command", "args", ...]); # discard stderr
# $out = run_git_quoted_nostderr(["command", "args", ...], "raw"); # ditto but raw instead of UTF-8 as above
sub _run_git {
my $args = shift;
my $encoding = (shift || 'encoding(UTF-8)');
open(my $git, "-|:${encoding}", "git ${args}")
open(my $git, "-|:${encoding}", @$args)
or die "Unable to fork: $!\n";
my $res = do {
local $/ = undef;
@@ -385,6 +387,13 @@ sub run_git {
return $res;
}
sub run_git_quoted {
_run_git(["git", @{$_[0]}], $_[1]);
}
sub run_git_quoted_nostderr {
_run_git(['sh', '-c', 'git "$@" 2>/dev/null', '--', @{$_[0]}], $_[1]);
}
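# Passing the argument list straight to open() executes git without an
# intermediate shell, so arguments such as page names containing spaces
# or shell metacharacters need no manual quoting; the sh -c wrapper
# above exists only to discard stderr while still forwarding the
# arguments unchanged via "$@".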
sub get_all_mediafiles {
my $pages = shift;
@@ -511,8 +520,9 @@ sub download_mw_mediafile {
}
sub get_last_local_revision {
# Get note regarding last mediawiki revision
my $note = run_git("notes --ref=${remotename}/mediawiki show refs/mediawiki/${remotename}/master 2>/dev/null");
# Get note regarding last mediawiki revision.
my $note = run_git_quoted_nostderr(["notes", "--ref=${remotename}/mediawiki",
"show", "refs/mediawiki/${remotename}/master"]);
my @note_info = split(/ /, $note);
my $lastrevision_number;
@@ -807,7 +817,10 @@ sub get_more_refs {
sub mw_import {
# multiple import commands can follow each other.
my @refs = (shift, get_more_refs('import'));
my $processedRefs;
foreach my $ref (@refs) {
next if $processedRefs->{$ref}; # skip duplicates: "import refs/heads/master" being issued twice; TODO: why?
$processedRefs->{$ref} = 1;
mw_import_ref($ref);
}
print {*STDOUT} "done\n";
@@ -970,7 +983,7 @@ sub mw_import_revids {
}
sub error_non_fast_forward {
my $advice = run_git('config --bool advice.pushNonFastForward');
my $advice = run_git_quoted(["config", "--bool", "advice.pushNonFastForward"]);
chomp($advice);
if ($advice ne 'false') {
# Native git-push would show this after the summary.
@@ -1014,7 +1027,7 @@ sub mw_upload_file {
}
} else {
# Don't let perl try to interpret file content as UTF-8 => use "raw"
my $content = run_git("cat-file blob ${new_sha1}", 'raw');
my $content = run_git_quoted(["cat-file", "blob", $new_sha1], 'raw');
if ($content ne EMPTY) {
$mediawiki = connect_maybe($mediawiki, $remotename, $url);
$mediawiki->{config}->{upload_url} =
@@ -1084,7 +1097,7 @@ sub mw_push_file {
# with this content instead:
$file_content = DELETED_CONTENT;
} else {
$file_content = run_git("cat-file blob ${new_sha1}");
$file_content = run_git_quoted(["cat-file", "blob", $new_sha1]);
}
$mediawiki = connect_maybe($mediawiki, $remotename, $url);
@@ -1174,10 +1187,10 @@ sub mw_push_revision {
my $mw_revision = $last_remote_revid;
# Get sha1 of commit pointed by local HEAD
my $HEAD_sha1 = run_git("rev-parse ${local} 2>/dev/null");
my $HEAD_sha1 = run_git_quoted_nostderr(["rev-parse", $local]);
chomp($HEAD_sha1);
# Get sha1 of commit pointed by remotes/$remotename/master
my $remoteorigin_sha1 = run_git("rev-parse refs/remotes/${remotename}/master 2>/dev/null");
my $remoteorigin_sha1 = run_git_quoted_nostderr(["rev-parse", "refs/remotes/${remotename}/master"]);
chomp($remoteorigin_sha1);
if ($last_local_revid > 0 &&
@@ -1197,7 +1210,7 @@ sub mw_push_revision {
my $parsed_sha1 = $remoteorigin_sha1;
# Find a path from last MediaWiki commit to pushed commit
print {*STDERR} "Computing path from local to remote ...\n";
my @local_ancestry = split(/\n/, run_git("rev-list --boundary --parents ${local} ^${parsed_sha1}"));
my @local_ancestry = split(/\n/, run_git_quoted(["rev-list", "--boundary", "--parents", $local, "^${parsed_sha1}"]));
my %local_ancestry;
foreach my $line (@local_ancestry) {
if (my ($child, $parents) = $line =~ /^-?([a-f0-9]+) ([a-f0-9 ]+)/) {
@@ -1221,7 +1234,7 @@ sub mw_push_revision {
# No remote mediawiki revision. Export the whole
# history (linearized with --first-parent)
print {*STDERR} "Warning: no common ancestor, pushing complete history\n";
my $history = run_git("rev-list --first-parent --children ${local}");
my $history = run_git_quoted(["rev-list", "--first-parent", "--children", $local]);
my @history = split(/\n/, $history);
@history = @history[1..$#history];
foreach my $line (reverse @history) {
@@ -1233,12 +1246,12 @@ sub mw_push_revision {
foreach my $commit_info_split (@commit_pairs) {
my $sha1_child = @{$commit_info_split}[0];
my $sha1_commit = @{$commit_info_split}[1];
my $diff_infos = run_git("diff-tree -r --raw -z ${sha1_child} ${sha1_commit}");
my $diff_infos = run_git_quoted(["diff-tree", "-r", "--raw", "-z", $sha1_child, $sha1_commit]);
# TODO: we could detect rename, and encode them with a #redirect on the wiki.
# TODO: for now, it's just a delete+add
my @diff_info_list = split(/\0/, $diff_infos);
# Keep the subject line of the commit message as mediawiki comment for the revision
my $commit_msg = run_git(qq(log --no-walk --format="%s" ${sha1_commit}));
my $commit_msg = run_git_quoted(["log", "--no-walk", '--format="%s"', $sha1_commit]);
chomp($commit_msg);
# Push every blob
while (@diff_info_list) {
@@ -1263,7 +1276,10 @@ sub mw_push_revision {
}
}
if (!$dumb_push) {
run_git(qq(notes --ref=${remotename}/mediawiki add -f -m "mediawiki_revision: ${mw_revision}" ${sha1_commit}));
run_git_quoted(["notes", "--ref=${remotename}/mediawiki",
"add", "-f", "-m",
"mediawiki_revision: ${mw_revision}",
$sha1_commit]);
}
}
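Note: unless dumb_push is set, the helper records which wiki revision each commit became by attaching a git note under a per-remote notes ref, the same ref that get_last_local_revision reads back. A round-trip sketch reusing the hypothetical demo_run_git from the earlier note (remote name and revision number invented):

    my $remote = 'origin';                          # hypothetical remote name
    my $sha1 = demo_run_git(['rev-parse', 'HEAD']);
    chomp($sha1);
    # -f overwrites any earlier note on the same commit.
    demo_run_git(['notes', "--ref=${remote}/mediawiki", 'add', '-f', '-m',
                  'mediawiki_revision: 42', $sha1]);
    print demo_run_git(['notes', "--ref=${remote}/mediawiki", 'show', $sha1]);
    # prints "mediawiki_revision: 42"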
@@ -1304,7 +1320,7 @@ sub get_mw_namespace_id {
# already cached. Namespaces are stored in form:
# "Name_of_namespace:Id_namespace", ex.: "File:6".
my @temp = split(/\n/,
run_git("config --get-all remote.${remotename}.namespaceCache"));
run_git_quoted(["config", "--get-all", "remote.${remotename}.namespaceCache"]));
chomp(@temp);
foreach my $ns (@temp) {
my ($n, $id) = split(/:/, $ns);
@@ -1358,7 +1374,7 @@ sub get_mw_namespace_id {
# Store explicitly requested namespaces on disk
if (!exists $cached_mw_namespace_id{$name}) {
run_git(qq(config --add remote.${remotename}.namespaceCache "${name}:${store_id}"));
run_git_quoted(["config", "--add", "remote.${remotename}.namespaceCache", "${name}:${store_id}"]);
$cached_mw_namespace_id{$name} = 1;
}
return $id;
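Note: get_mw_namespace_id persists Name:Id pairs in a multi-valued git config key, a cheap on-disk cache: --add appends one value, --get-all lists them all. A sketch of writing and reloading such a cache (key name taken from the patch, value invented; this writes to the current repository's config and reuses the hypothetical demo_run_git):

    demo_run_git(['config', '--add', 'remote.origin.namespaceCache', 'File:6']);
    my %namespace_id;
    foreach my $pair (split /\n/, demo_run_git(['config', '--get-all',
                                                'remote.origin.namespaceCache'])) {
        my ($name, $id) = split /:/, $pair;
        $namespace_id{$name} = $id;      # e.g. $namespace_id{File} is 6
    }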

@@ -4,4 +4,4 @@ objects from mediawiki just as one would do with a classic git
repository thanks to remote-helpers.
For more information, visit the wiki at
https://github.com/moy/Git-Mediawiki/wiki
https://github.com/Git-Mediawiki/Git-Mediawiki

@@ -1,4 +1,4 @@
WEB/
wiki/
mediawiki/
trash directory.t*/
test-results/

@@ -14,11 +14,11 @@ install the following packages (Debian/Ubuntu names, may need to be
adapted for another distribution):
* lighttpd
* php5
* php5-cgi
* php5-cli
* php5-curl
* php5-sqlite
* php
* php-cgi
* php-cli
* php-curl
* php-sqlite
Principles and Technical Choices
--------------------------------

@@ -1 +0,0 @@
wikidb.sqlite

@@ -1,129 +0,0 @@
<?php
# This file was automatically generated by the MediaWiki 1.19.0
# installer. If you make manual changes, please keep track in case you
# need to recreate them later.
#
# See includes/DefaultSettings.php for all configurable settings
# and their default values, but don't forget to make changes in _this_
# file, not there.
#
# Further documentation for configuration settings may be found at:
# http://www.mediawiki.org/wiki/Manual:Configuration_settings
# Protect against web entry
if ( !defined( 'MEDIAWIKI' ) ) {
exit;
}
## Uncomment this to disable output compression
# $wgDisableOutputCompression = true;
$wgSitename = "Git-MediaWiki-Test";
$wgMetaNamespace = "Git-MediaWiki-Test";
## The URL base path to the directory containing the wiki;
## defaults for all runtime URL paths are based off of this.
## For more information on customizing the URLs please see:
## http://www.mediawiki.org/wiki/Manual:Short_URL
$wgScriptPath = "@WG_SCRIPT_PATH@";
$wgScriptExtension = ".php";
## The protocol and server name to use in fully-qualified URLs
$wgServer = "@WG_SERVER@";
## The relative URL path to the skins directory
$wgStylePath = "$wgScriptPath/skins";
## The relative URL path to the logo. Make sure you change this from the default,
## or else you'll overwrite your logo when you upgrade!
$wgLogo = "$wgStylePath/common/images/wiki.png";
## UPO means: this is also a user preference option
$wgEnableEmail = true;
$wgEnableUserEmail = true; # UPO
$wgEmergencyContact = "apache@localhost";
$wgPasswordSender = "apache@localhost";
$wgEnotifUserTalk = false; # UPO
$wgEnotifWatchlist = false; # UPO
$wgEmailAuthentication = true;
## Database settings
$wgDBtype = "sqlite";
$wgDBserver = "";
$wgDBname = "@WG_SQLITE_DATAFILE@";
$wgDBuser = "";
$wgDBpassword = "";
# SQLite-specific settings
$wgSQLiteDataDir = "@WG_SQLITE_DATADIR@";
## Shared memory settings
$wgMainCacheType = CACHE_NONE;
$wgMemCachedServers = array();
## To enable image uploads, make sure the 'images' directory
## is writable, then set this to true:
$wgEnableUploads = true;
$wgUseImageMagick = true;
$wgImageMagickConvertCommand ="@CONVERT@";
$wgFileExtensions[] = 'txt';
# InstantCommons allows wiki to use images from http://commons.wikimedia.org
$wgUseInstantCommons = false;
## If you use ImageMagick (or any other shell command) on a
## Linux server, this will need to be set to the name of an
## available UTF-8 locale
$wgShellLocale = "en_US.utf8";
## If you want to use image uploads under safe mode,
## create the directories images/archive, images/thumb and
## images/temp, and make them all writable. Then uncomment
## this, if it's not already uncommented:
#$wgHashedUploadDirectory = false;
## Set $wgCacheDirectory to a writable directory on the web server
## to make your wiki go slightly faster. The directory should not
## be publicly accessible from the web.
#$wgCacheDirectory = "$IP/cache";
# Site language code, should be one of the list in ./languages/Names.php
$wgLanguageCode = "en";
$wgSecretKey = "1c912bfe3519fb70f5dc523ecc698111cd43d81a11c585b3eefb28f29c2699b7";
#$wgSecretKey = "@SECRETKEY@";
# Site upgrade key. Must be set to a string (default provided) to turn on the
# web installer while LocalSettings.php is in place
$wgUpgradeKey = "ddae7dc87cd0a645";
## Default skin: you can change the default skin. Use the internal symbolic
## names, ie 'standard', 'nostalgia', 'cologneblue', 'monobook', 'vector':
$wgDefaultSkin = "vector";
## For attaching licensing metadata to pages, and displaying an
## appropriate copyright notice / icon. GNU Free Documentation
## License and Creative Commons licenses are supported so far.
$wgRightsPage = ""; # Set to the title of a wiki page that describes your license/copyright
$wgRightsUrl = "";
$wgRightsText = "";
$wgRightsIcon = "";
# Path to the GNU diff3 utility. Used for conflict resolution.
$wgDiff3 = "/usr/bin/diff3";
# Query string length limit for ResourceLoader. You should only set this if
# your web server has a query string length limit (then set it to that limit),
# or if you have suhosin.get.max_value_length set in php.ini (then set it to
# that value)
$wgResourceLoaderMaxQueryLength = -1;
# End of automatically generated settings.
# Add more configuration options below.

@@ -1,120 +0,0 @@
<?php
/**
* This script generates a SQLite database for a MediaWiki version 1.19.0
* You must specify the login of the admin (argument 1) and its
* password (argument 2) and the folder where the database file
* is located (absolute path in argument 3).
* It is used by the script install-wiki.sh in order to make easy the
* installation of a MediaWiki.
*
* In order to generate a SQLite database file, MediaWiki ask the user
* to submit some forms in its web browser. This script simulates this
* behavior though the functions <get> and <submit>
*
*/
$argc = $_SERVER['argc'];
$argv = $_SERVER['argv'];
$login = $argv[2];
$pass = $argv[3];
$tmp = $argv[4];
$port = $argv[5];
$url = 'http://localhost:'.$port.'/wiki/mw-config/index.php';
$db_dir = urlencode($tmp);
$tmp_cookie = tempnam($tmp, "COOKIE_");
/*
* Fetchs a page with cURL.
*/
function get($page_name = "") {
$curl = curl_init();
$page_name_add = "";
if ($page_name != "") {
$page_name_add = '?page='.$page_name;
}
$url = $GLOBALS['url'].$page_name_add;
$tmp_cookie = $GLOBALS['tmp_cookie'];
curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
curl_setopt($curl, CURLOPT_HEADER, true);
curl_setopt($curl, CURLOPT_URL, $url);
$page = curl_exec($curl);
if (!$page) {
die("Could not get page: $url\n");
}
curl_close($curl);
return $page;
}
/*
* Submits a form with cURL.
*/
function submit($page_name, $option = "") {
$curl = curl_init();
$datapost = 'submit-continue=Continue+%E2%86%92';
if ($option != "") {
$datapost = $option.'&'.$datapost;
}
$url = $GLOBALS['url'].'?page='.$page_name;
$tmp_cookie = $GLOBALS['tmp_cookie'];
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_POST, true);
curl_setopt($curl, CURLOPT_FOLLOWLOCATION, true);
curl_setopt($curl, CURLOPT_POSTFIELDS, $datapost);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_COOKIEJAR, $tmp_cookie);
curl_setopt($curl, CURLOPT_COOKIEFILE, $tmp_cookie);
$page = curl_exec($curl);
if (!$page) {
die("Could not get page: $url\n");
}
curl_close($curl);
return "$page";
}
/*
* Here starts this script: simulates the behavior of the user
* submitting forms to generates the database file.
* Note this simulation was made for the MediaWiki version 1.19.0,
* we can't assume it works with other versions.
*
*/
$page = get();
if (!preg_match('/input type="hidden" value="([0-9]+)" name="LanguageRequestTime"/',
$page, $matches)) {
echo "Unexpected content for page downloaded:\n";
echo "$page";
die;
};
$timestamp = $matches[1];
$language = "LanguageRequestTime=$timestamp&uselang=en&ContLang=en";
$page = submit('Language', $language);
submit('Welcome');
$db_config = 'DBType=sqlite';
$db_config = $db_config.'&sqlite_wgSQLiteDataDir='.$db_dir;
$db_config = $db_config.'&sqlite_wgDBname='.$argv[1];
submit('DBConnect', $db_config);
$wiki_config = 'config_wgSitename=TEST';
$wiki_config = $wiki_config.'&config__NamespaceType=site-name';
$wiki_config = $wiki_config.'&config_wgMetaNamespace=MyWiki';
$wiki_config = $wiki_config.'&config__AdminName='.$login;
$wiki_config = $wiki_config.'&config__AdminPassword='.$pass;
$wiki_config = $wiki_config.'&config__AdminPassword2='.$pass;
$wiki_config = $wiki_config.'&wiki__configEmail=email%40email.org';
$wiki_config = $wiki_config.'&config__SkipOptional=skip';
submit('Name', $wiki_config);
submit('Install');
submit('Install');
unlink($tmp_cookie);
?>

@@ -28,7 +28,7 @@ test_expect_success 'Git clone creates the expected git log with one file' '
git log --format=%s HEAD^..HEAD >log.tmp
) &&
echo "this must be the same" >msg.tmp &&
diff -b mw_dir_1/log.tmp msg.tmp
test_cmp msg.tmp mw_dir_1/log.tmp
'
@@ -50,8 +50,8 @@ test_expect_success 'Git clone creates the expected git log with multiple files'
echo "this must be the same" >>msgDaddy.tmp &&
echo "identical too" >msgDj.tmp &&
echo "identical" >>msgDj.tmp &&
diff -b mw_dir_2/logDaddy.tmp msgDaddy.tmp &&
diff -b mw_dir_2/logDj.tmp msgDj.tmp
test_cmp msgDaddy.tmp mw_dir_2/logDaddy.tmp &&
test_cmp msgDj.tmp mw_dir_2/logDj.tmp
'
@@ -135,7 +135,7 @@ test_expect_success 'Git clone works with one specific page cloned ' '
cd mw_dir_8 &&
echo "this log must stay" >msg.tmp &&
git log --format=%s >log.tmp &&
diff -b msg.tmp log.tmp
test_cmp msg.tmp log.tmp
) &&
wiki_check_content mw_dir_8/Namnam.mw Namnam
'
@@ -143,7 +143,7 @@ test_expect_success 'Git clone works with one specific page cloned ' '
test_expect_success 'Git clone works with multiple specific page cloned ' '
wiki_reset &&
wiki_editpage foo "I will be there" false &&
wiki_editpage bar "I will not disapear" false &&
wiki_editpage bar "I will not disappear" false &&
wiki_editpage namnam "I be erased" false &&
wiki_editpage nyancat "nyan nyan nyan you will not erase me" false &&
wiki_delete_page namnam &&

@@ -27,12 +27,12 @@ test_git_reimport () {
# Don't bother with permissions, be administrator by default
test_expect_success 'setup config' '
git config --global remote.origin.mwLogin WikiAdmin &&
git config --global remote.origin.mwPassword AdminPass &&
git config --global remote.origin.mwLogin "$WIKI_ADMIN" &&
git config --global remote.origin.mwPassword "$WIKI_PASSW" &&
test_might_fail git config --global --unset remote.origin.mediaImport
'
test_expect_success 'git push can upload media (File:) files' '
test_expect_failure 'git push can upload media (File:) files' '
wiki_reset &&
git clone mediawiki::'"$WIKI_URL"' mw_dir &&
(
@@ -48,13 +48,14 @@ test_expect_success 'git push can upload media (File:) files' '
)
'
test_expect_success 'git clone works on previously created wiki with media files' '
test_expect_failure 'git clone works on previously created wiki with media files' '
test_when_finished "rm -rf mw_dir mw_dir_clone" &&
git clone -c remote.origin.mediaimport=true \
mediawiki::'"$WIKI_URL"' mw_dir_clone &&
test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt &&
(cd mw_dir_clone && git checkout HEAD^) &&
(cd mw_dir && git checkout HEAD^) &&
test_path_is_file mw_dir_clone/Foo.txt &&
test_cmp mw_dir_clone/Foo.txt mw_dir/Foo.txt
'

@@ -13,7 +13,8 @@
. ./test.config
WIKI_URL=http://"$SERVER_ADDR:$PORT/$WIKI_DIR_NAME"
WIKI_BASE_URL=http://$SERVER_ADDR:$PORT
WIKI_URL=$WIKI_BASE_URL/$WIKI_DIR_NAME
CURR_DIR=$(pwd)
TEST_OUTPUT_DIRECTORY=$(pwd)
TEST_DIRECTORY="$CURR_DIR"/../../../t
@@ -65,7 +66,7 @@ test_check_precond () {
GIT_EXEC_PATH=$(cd "$(dirname "$0")" && cd "../.." && pwd)
PATH="$GIT_EXEC_PATH"'/bin-wrapper:'"$PATH"
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ];
if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
then
skip_all='skipping gateway git-mw tests, no mediawiki found'
test_done
@@ -279,7 +280,7 @@ start_lighttpd () {
"$LIGHTTPD_DIR"/lighttpd -f "$WEB"/lighttpd.conf
if test $? -ne 0 ; then
echo "Could not execute http deamon lighttpd"
echo "Could not execute http daemon lighttpd"
exit 1
fi
}
@@ -291,27 +292,59 @@ stop_lighttpd () {
test -f "$WEB_TMP/pid" && kill $(cat "$WEB_TMP/pid")
}
# Create the SQLite database of the MediaWiki. If the database file already
# exists, it will be deleted.
# This script should be runned from the directory where $FILES_FOLDER is
# located.
create_db () {
rm -f "$TMP/$DB_FILE"
wiki_delete_db () {
rm -rf \
"$FILES_FOLDER_DB"/* || error "Couldn't delete $FILES_FOLDER_DB/"
}
echo "Generating the SQLite database file. It can take some time ..."
# Run the php script to generate the SQLite database file
# with cURL calls.
php "$FILES_FOLDER/$DB_INSTALL_SCRIPT" $(basename "$DB_FILE" .sqlite) \
"$WIKI_ADMIN" "$WIKI_PASSW" "$TMP" "$PORT"
wiki_delete_db_backup () {
rm -rf \
"$FILES_FOLDER_POST_INSTALL_DB"/* || error "Couldn't delete $FILES_FOLDER_POST_INSTALL_DB/"
}
if [ ! -f "$TMP/$DB_FILE" ] ; then
error "Can't create database file $TMP/$DB_FILE. Try to run ./install-wiki.sh delete first."
# Install MediaWiki using its install.php script. If the database file
# already exists, it will be deleted.
install_mediawiki () {
localsettings="$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php"
if test -f "$localsettings"
then
error "We already installed the wiki, since $localsettings exists" \
"perhaps you wanted to run 'delete' first?"
fi
# Copy the generated database file into the directory the
# user indicated.
cp "$TMP/$DB_FILE" "$FILES_FOLDER" ||
error "Unable to copy $TMP/$DB_FILE to $FILES_FOLDER"
wiki_delete_db
wiki_delete_db_backup
mkdir \
"$FILES_FOLDER_DB/" \
"$FILES_FOLDER_POST_INSTALL_DB/"
install_script="$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php"
echo "Installing MediaWiki using $install_script. This may take some time ..."
php "$WIKI_DIR_INST/$WIKI_DIR_NAME/maintenance/install.php" \
--server $WIKI_BASE_URL \
--scriptpath /wiki \
--lang en \
--dbtype sqlite \
--dbpath $PWD/$FILES_FOLDER_DB/ \
--pass "$WIKI_PASSW" \
Git-MediaWiki-Test \
"$WIKI_ADMIN" ||
error "Couldn't run $install_script, see errors above. Try to run ./install-wiki.sh delete first."
cat <<-'EOF' >>$localsettings
# Custom settings added by test-gitmw-lib.sh
#
# Uploading text files is needed for
# t9363-mw-to-git-export-import.sh
$wgEnableUploads = true;
$wgFileExtensions[] = 'txt';
EOF
# Copy the initially generated database file into our backup
# folder
cp -R "$FILES_FOLDER_DB/"* "$FILES_FOLDER_POST_INSTALL_DB/" ||
error "Unable to copy $FILES_FOLDER_DB/* to $FILES_FOLDER_POST_INSTALL_DB/*"
}
# Install a wiki in your web server directory.
@@ -320,30 +353,33 @@ wiki_install () {
start_lighttpd
fi
SERVER_ADDR=$SERVER_ADDR:$PORT
# In this part, we change directory to $TMP in order to download,
# unpack and copy the files of MediaWiki
(
mkdir -p "$WIKI_DIR_INST/$WIKI_DIR_NAME"
if [ ! -d "$WIKI_DIR_INST/$WIKI_DIR_NAME" ] ; then
if ! test -d "$WIKI_DIR_INST/$WIKI_DIR_NAME"
then
error "Folder $WIKI_DIR_INST/$WIKI_DIR_NAME doesn't exist.
Please create it and launch the script again."
fi
# Fetch MediaWiki's archive if not already present in the TMP directory
# Fetch MediaWiki's archive if not already present in the
# download directory
mkdir -p "$FILES_FOLDER_DOWNLOAD"
MW_FILENAME="mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
cd "$TMP"
if [ ! -f $MW_FILENAME ] ; then
cd "$FILES_FOLDER_DOWNLOAD"
if ! test -f $MW_FILENAME
then
echo "Downloading $MW_VERSION_MAJOR.$MW_VERSION_MINOR sources ..."
wget "http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/$MW_FILENAME" ||
error "Unable to download "\
"http://download.wikimedia.org/mediawiki/$MW_VERSION_MAJOR/"\
"$MW_FILENAME. "\
"Please fix your connection and launch the script again."
echo "$MW_FILENAME downloaded in $(pwd). "\
"You can delete it later if you want."
echo "$MW_FILENAME downloaded in $(pwd)/;" \
"you can delete it later if you want."
else
echo "Reusing existing $MW_FILENAME downloaded in $(pwd)."
echo "Reusing existing $MW_FILENAME downloaded in $(pwd)/"
fi
archive_abs_path=$(pwd)/$MW_FILENAME
cd "$WIKI_DIR_INST/$WIKI_DIR_NAME/" ||
@@ -352,48 +388,12 @@ wiki_install () {
error "Unable to extract WikiMedia's files from $archive_abs_path to "\
"$WIKI_DIR_INST/$WIKI_DIR_NAME"
) || exit 1
echo Extracted in "$WIKI_DIR_INST/$WIKI_DIR_NAME"
create_db
# Copy the generic LocalSettings.php in the web server's directory
# And modify parameters according to the ones set at the top
# of this script.
# Note that LocalSettings.php is never modified.
if [ ! -f "$FILES_FOLDER/LocalSettings.php" ] ; then
error "Can't find $FILES_FOLDER/LocalSettings.php " \
"in the current folder. "\
"Please run the script inside its folder."
fi
cp "$FILES_FOLDER/LocalSettings.php" \
"$FILES_FOLDER/LocalSettings-tmp.php" ||
error "Unable to copy $FILES_FOLDER/LocalSettings.php " \
"to $FILES_FOLDER/LocalSettings-tmp.php"
# Parse and set the LocalSettings file of the user according to the
# CONFIGURATION VARIABLES section at the beginning of this script
file_swap="$FILES_FOLDER/LocalSettings-swap.php"
sed "s,@WG_SCRIPT_PATH@,/$WIKI_DIR_NAME," \
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
sed "s,@WG_SERVER@,http://$SERVER_ADDR," \
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
sed "s,@WG_SQLITE_DATADIR@,$TMP," \
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
sed "s,@WG_SQLITE_DATAFILE@,$( basename $DB_FILE .sqlite)," \
"$FILES_FOLDER/LocalSettings-tmp.php" > "$file_swap"
mv "$file_swap" "$FILES_FOLDER/LocalSettings-tmp.php"
mv "$FILES_FOLDER/LocalSettings-tmp.php" \
"$WIKI_DIR_INST/$WIKI_DIR_NAME/LocalSettings.php" ||
error "Unable to move $FILES_FOLDER/LocalSettings-tmp.php" \
"in $WIKI_DIR_INST/$WIKI_DIR_NAME"
echo "File $FILES_FOLDER/LocalSettings.php is set in" \
" $WIKI_DIR_INST/$WIKI_DIR_NAME"
install_mediawiki
echo "Your wiki has been installed. You can check it at
http://$SERVER_ADDR/$WIKI_DIR_NAME"
$WIKI_URL"
}
# Reset the database of the wiki and the password of the admin
@@ -401,12 +401,18 @@ wiki_install () {
# Warning: This function must be called only in a subdirectory of t/ directory
wiki_reset () {
# Copy initial database of the wiki
if [ ! -f "../$FILES_FOLDER/$DB_FILE" ] ; then
error "Can't find ../$FILES_FOLDER/$DB_FILE in the current folder."
if ! test -d "../$FILES_FOLDER_DB"
then
error "No wiki database at ../$FILES_FOLDER_DB, not installed yet?"
fi
cp "../$FILES_FOLDER/$DB_FILE" "$TMP" ||
error "Can't copy ../$FILES_FOLDER/$DB_FILE in $TMP"
echo "File $FILES_FOLDER/$DB_FILE is set in $TMP"
if ! test -d "../$FILES_FOLDER_POST_INSTALL_DB"
then
error "No wiki backup database at ../$FILES_FOLDER_POST_INSTALL_DB, failed installation?"
fi
wiki_delete_db
cp -R "../$FILES_FOLDER_POST_INSTALL_DB/"* "../$FILES_FOLDER_DB/" ||
error "Can't copy ../$FILES_FOLDER_POST_INSTALL_DB/* to ../$FILES_FOLDER_DB/*"
echo "File $FILES_FOLDER_DB/* has been reset"
}
# Delete the wiki created in the web server's directory and all its content
@@ -420,13 +426,7 @@ wiki_delete () {
rm -rf "$WIKI_DIR_INST/$WIKI_DIR_NAME" ||
error "Wiki's directory $WIKI_DIR_INST/" \
"$WIKI_DIR_NAME could not be deleted"
# Delete the wiki's SQLite database.
rm -f "$TMP/$DB_FILE" ||
error "Database $TMP/$DB_FILE could not be deleted."
fi
# Delete the wiki's SQLite database
rm -f "$TMP/$DB_FILE" || error "Database $TMP/$DB_FILE could not be deleted."
rm -f "$FILES_FOLDER/$DB_FILE"
rm -rf "$TMP/mediawiki-$MW_VERSION_MAJOR.$MW_VERSION_MINOR.tar.gz"
wiki_delete_db
wiki_delete_db_backup
}

@@ -24,9 +24,7 @@
use MediaWiki::API;
use Getopt::Long;
use encoding 'utf8';
use DateTime::Format::ISO8601;
use open ':encoding(utf8)';
use constant SLASH_REPLACEMENT => "%2F";
#Parsing of the config file
@@ -87,7 +85,7 @@ sub wiki_getpage {
# Replace spaces by underscore in the page name
$pagename =~ s/ /_/g;
$pagename =~ s/\//%2F/g;
open(my $file, ">$destdir/$pagename.mw");
open(my $file, ">:encoding(UTF-8)", "$destdir/$pagename.mw");
print $file "$content";
close ($file);
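Note: the changed open calls in this file move UTF-8 handling from the removed global 'use open' pragma to an explicit I/O layer in a three-argument open. A minimal sketch with an invented filename:

    use strict;
    use warnings;
    use utf8;   # string literals below are character data

    # Everything printed to $fh is encoded as UTF-8 on write,
    # independent of any global default layers.
    open(my $fh, '>:encoding(UTF-8)', 'Page.mw')
        or die "cannot open Page.mw: $!";
    print $fh "café wiki page\n";
    close($fh) or die "cannot close Page.mw: $!";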
@@ -172,7 +170,7 @@ sub wiki_getallpagename {
cmlimit => 500 },
)
|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
open(my $file, ">all.txt");
open(my $file, ">:encoding(UTF-8)", "all.txt");
foreach my $page (@{$mw_pages}) {
print $file "$page->{title}\n";
}
@@ -185,7 +183,7 @@ sub wiki_getallpagename {
aplimit => 500,
})
|| die $mw->{error}->{code}.": ".$mw->{error}->{details};
open(my $file, ">all.txt");
open(my $file, ">:encoding(UTF-8)", "all.txt");
foreach my $page (@{$mw_pages}) {
print $file "$page->{title}\n";
}
@@ -214,12 +212,12 @@ my $fct_to_call = shift;
wiki_login($wiki_admin, $wiki_admin_pass);
my %functions_to_call = qw(
upload_file wiki_upload_file
get_page wiki_getpage
delete_page wiki_delete_page
edit_page wiki_editpage
getallpagename wiki_getallpagename
my %functions_to_call = (
upload_file => \&wiki_upload_file,
get_page => \&wiki_getpage,
delete_page => \&wiki_delete_page,
edit_page => \&wiki_editpage,
getallpagename => \&wiki_getallpagename,
);
die "$0 ERROR: wrong argument" unless exists $functions_to_call{$fct_to_call};
&{$functions_to_call{$fct_to_call}}(@ARGV);
$functions_to_call{$fct_to_call}->(map { utf8::decode($_); $_ } @ARGV);
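Note: the old qw() table mapped command names to function names, which can only be called through symbolic references (disallowed under 'use strict' refs); the replacement stores code references and decodes each command-line argument from UTF-8 before dispatch. A self-contained sketch of the pattern, with two stand-in functions:

    use strict;
    use warnings;

    sub get_page  { print "get_page(@_)\n"; }
    sub edit_page { print "edit_page(@_)\n"; }

    my %functions_to_call = (
        get_page  => \&get_page,     # code references work under strict
        edit_page => \&edit_page,
    );

    my $fct = shift @ARGV;
    die "usage: demo.pl <command> [args...]\n" unless defined $fct;
    die "demo.pl ERROR: wrong argument\n" unless exists $functions_to_call{$fct};
    # Turn the raw bytes from the command line into character strings.
    $functions_to_call{$fct}->(map { utf8::decode($_); $_ } @ARGV);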

@@ -3,15 +3,11 @@ WIKI_DIR_NAME=wiki
# Login and password of the wiki's admin
WIKI_ADMIN=WikiAdmin
WIKI_PASSW=AdminPass
WIKI_PASSW=AdminPass1
# Address of the web server
SERVER_ADDR=localhost
# SQLite database of the wiki, named DB_FILE, is located in TMP
TMP=/tmp
DB_FILE=wikidb.sqlite
# If LIGHTTPD is not set to true, the script will use the default
# web server running in WIKI_DIR_INST.
WIKI_DIR_INST=/var/www
@@ -28,10 +24,17 @@ WEB=WEB
WEB_TMP=$WEB/tmp
WEB_WWW=$WEB/www
# Where our configuration for the wiki is located
FILES_FOLDER=mediawiki
FILES_FOLDER_DOWNLOAD=$FILES_FOLDER/download
FILES_FOLDER_DB=$FILES_FOLDER/db
FILES_FOLDER_POST_INSTALL_DB=$FILES_FOLDER/post-install-db
# The variables below are used by the script to install a wiki.
# You should not modify these unless you are modifying the script itself.
# tested versions: 1.19.X -> 1.21.1
MW_VERSION_MAJOR=1.21
MW_VERSION_MINOR=1
FILES_FOLDER=install-wiki
DB_INSTALL_SCRIPT=db_install.php
# tested versions: 1.19.X -> 1.21.1 -> 1.34.2
#
# See https://www.mediawiki.org/wiki/Download for what the latest
# version is.
MW_VERSION_MAJOR=1.34
MW_VERSION_MINOR=2

@@ -25,14 +25,16 @@ ASCIIDOC_HTML = xhtml11
ASCIIDOC_DOCBOOK = docbook
ASCIIDOC_EXTRA =
XMLTO = xmlto
XMLTO_EXTRA =
ifdef USE_ASCIIDOCTOR
ASCIIDOC = asciidoctor
ASCIIDOC_CONF =
ASCIIDOC_HTML = xhtml5
ASCIIDOC_DOCBOOK = docbook45
ASCIIDOC_DOCBOOK = docbook
ASCIIDOC_EXTRA += -I../../Documentation -rasciidoctor-extensions
ASCIIDOC_EXTRA += -alitdd='&\#x2d;&\#x2d;'
XMLTO_EXTRA += --skip-validation
endif
ifndef SHELL_PATH
@@ -78,7 +80,7 @@ install-html: $(GIT_SUBTREE_HTML)
$(INSTALL) -m 644 $^ $(DESTDIR)$(htmldir)
$(GIT_SUBTREE_DOC): $(GIT_SUBTREE_XML)
$(XMLTO) -m $(MANPAGE_XSL) man $^
$(XMLTO) -m $(MANPAGE_XSL) $(XMLTO_EXTRA) man $^
$(GIT_SUBTREE_XML): $(GIT_SUBTREE_TXT)
$(ASCIIDOC) -b $(ASCIIDOC_DOCBOOK) -d manpage $(ASCIIDOC_CONF) \

@@ -139,12 +139,12 @@ OPTIONS
-m <message>::
--message=<message>::
This option is only valid for add, merge and pull (unsure).
This option is only valid for add, merge, pull, and split --rejoin.
Specify <message> as the commit message for the merge commit.
OPTIONS FOR add, merge, push, pull
----------------------------------
OPTIONS FOR add, merge, and pull
--------------------------------
--squash::
This option is only valid for add, merge, and pull
commands.

@@ -1,4 +0,0 @@
/*.xml
/*.1
/*.html
/svn-fe

@@ -1,105 +0,0 @@
all:: svn-fe$X
CC = cc
RM = rm -f
MV = mv
CFLAGS = -g -O2 -Wall
LDFLAGS =
EXTLIBS = -lz
include ../../config.mak.uname
-include ../../config.mak.autogen
-include ../../config.mak
ifeq ($(uname_S),Darwin)
ifndef NO_FINK
ifeq ($(shell test -d /sw/lib && echo y),y)
CFLAGS += -I/sw/include
LDFLAGS += -L/sw/lib
endif
endif
ifndef NO_DARWIN_PORTS
ifeq ($(shell test -d /opt/local/lib && echo y),y)
CFLAGS += -I/opt/local/include
LDFLAGS += -L/opt/local/lib
endif
endif
endif
ifndef NO_OPENSSL
EXTLIBS += -lssl
ifdef NEEDS_CRYPTO_WITH_SSL
EXTLIBS += -lcrypto
endif
endif
ifndef NO_PTHREADS
CFLAGS += $(PTHREADS_CFLAGS)
EXTLIBS += $(PTHREAD_LIBS)
endif
ifdef HAVE_CLOCK_GETTIME
CFLAGS += -DHAVE_CLOCK_GETTIME
EXTLIBS += -lrt
endif
ifdef NEEDS_LIBICONV
EXTLIBS += -liconv
endif
GIT_LIB = ../../libgit.a
VCSSVN_LIB = ../../vcs-svn/lib.a
XDIFF_LIB = ../../xdiff/lib.a
LIBS = $(VCSSVN_LIB) $(GIT_LIB) $(XDIFF_LIB)
QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
QUIET_SUBDIR1 =
ifneq ($(findstring $(MAKEFLAGS),w),w)
PRINT_DIR = --no-print-directory
else # "make -w"
NO_SUBDIR = :
endif
ifneq ($(findstring $(MAKEFLAGS),s),s)
ifndef V
QUIET_CC = @echo ' ' CC $@;
QUIET_LINK = @echo ' ' LINK $@;
QUIET_SUBDIR0 = +@subdir=
QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
$(MAKE) $(PRINT_DIR) -C $$subdir
endif
endif
svn-fe$X: svn-fe.o $(VCSSVN_LIB) $(XDIFF_LIB) $(GIT_LIB)
$(QUIET_LINK)$(CC) $(CFLAGS) $(LDFLAGS) $(EXTLIBS) -o $@ svn-fe.o $(LIBS)
svn-fe.o: svn-fe.c ../../vcs-svn/svndump.h
$(QUIET_CC)$(CC) $(CFLAGS) -I../../vcs-svn -o $*.o -c $<
svn-fe.html: svn-fe.txt
$(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
MAN_TXT=../contrib/svn-fe/svn-fe.txt \
../contrib/svn-fe/$@
svn-fe.1: svn-fe.txt
$(QUIET_SUBDIR0)../../Documentation $(QUIET_SUBDIR1) \
MAN_TXT=../contrib/svn-fe/svn-fe.txt \
../contrib/svn-fe/$@
$(MV) ../../Documentation/svn-fe.1 .
../../vcs-svn/lib.a: FORCE
$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) vcs-svn/lib.a
../../xdiff/lib.a: FORCE
$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) xdiff/lib.a
../../libgit.a: FORCE
$(QUIET_SUBDIR0)../.. $(QUIET_SUBDIR1) libgit.a
clean:
$(RM) svn-fe$X svn-fe.o svn-fe.html svn-fe.xml svn-fe.1
.PHONY: all clean FORCE

@@ -1,18 +0,0 @@
/*
* This file is in the public domain.
* You may freely use, modify, distribute, and relicense it.
*/
#include <stdlib.h>
#include "svndump.h"
int main(int argc, char **argv)
{
if (svndump_init(NULL))
return 1;
svndump_read((argc > 1) ? argv[1] : NULL, "refs/heads/master",
"refs/notes/svn/revs");
svndump_deinit();
svndump_reset();
return 0;
}

@@ -1,71 +0,0 @@
svn-fe(1)
=========
NAME
----
svn-fe - convert an SVN "dumpfile" to a fast-import stream
SYNOPSIS
--------
[verse]
mkfifo backchannel &&
svnadmin dump --deltas REPO |
svn-fe [url] 3<backchannel |
git fast-import --cat-blob-fd=3 3>backchannel
DESCRIPTION
-----------
Converts a Subversion dumpfile into input suitable for
git-fast-import(1) and similar importers. REPO is a path to a
Subversion repository mirrored on the local disk. Remote Subversion
repositories can be mirrored on local disk using the `svnsync`
command.
Note: this tool is very young. The details of its commandline
interface may change in backward incompatible ways.
INPUT FORMAT
------------
Subversion's repository dump format is documented in full in
`notes/dump-load-format.txt` from the Subversion source tree.
Files in this format can be generated using the 'svnadmin dump' or
'svk admin dump' command.
OUTPUT FORMAT
-------------
The fast-import format is documented by the git-fast-import(1)
manual page.
NOTES
-----
Subversion dumps do not record a separate author and committer for
each revision, nor do they record a separate display name and email
address for each author. Like git-svn(1), 'svn-fe' will use the name
---------
user <user@UUID>
---------
as committer, where 'user' is the value of the `svn:author` property
and 'UUID' the repository's identifier.
To support incremental imports, 'svn-fe' puts a `git-svn-id` line at
the end of each commit log message if passed a URL on the command
line. This line has the form `git-svn-id: URL@REVNO UUID`.
The resulting repository will generally require further processing
to put each project in its own repository and to separate the history
of each branch. The 'git filter-branch --subdirectory-filter' command
may be useful for this purpose.
BUGS
----
Empty directories and unknown properties are silently discarded.
The exit status does not reflect whether an error was detected.
SEE ALSO
--------
git-svn(1), svn2git(1), svk(1), git-filter-branch(1), git-fast-import(1),
https://svn.apache.org/repos/asf/subversion/trunk/notes/dump-load-format.txt

@@ -1,68 +0,0 @@
#!/usr/bin/python
"""
Simulates svnrdump by replaying an existing dump from a file, taking care
of the specified revision range.
To simulate incremental imports the environment variable SVNRMAX can be set
to the highest revision that should be available.
"""
import sys
import os
if sys.hexversion < 0x02040000:
# The limiter is the ValueError() calls. This may be too conservative
sys.stderr.write("svnrdump-sim.py: requires Python 2.4 or later.\n")
sys.exit(1)
def getrevlimit():
var = 'SVNRMAX'
if var in os.environ:
return os.environ[var]
return None
def writedump(url, lower, upper):
if url.startswith('sim://'):
filename = url[6:]
if filename[-1] == '/':
filename = filename[:-1] # remove terminating slash
else:
raise ValueError('sim:// url required')
f = open(filename, 'r')
state = 'header'
wroterev = False
while(True):
l = f.readline()
if l == '':
break
if state == 'header' and l.startswith('Revision-number: '):
state = 'prefix'
if state == 'prefix' and l == 'Revision-number: %s\n' % lower:
state = 'selection'
if not upper == 'HEAD' and state == 'selection' and \
l == 'Revision-number: %s\n' % upper:
break
if state == 'header' or state == 'selection':
if state == 'selection':
wroterev = True
sys.stdout.write(l)
return wroterev
if __name__ == "__main__":
if not (len(sys.argv) in (3, 4, 5)):
print("usage: %s dump URL -rLOWER:UPPER")
sys.exit(1)
if not sys.argv[1] == 'dump':
raise NotImplementedError('only "dump" is suppported.')
url = sys.argv[2]
r = ('0', 'HEAD')
if len(sys.argv) == 4 and sys.argv[3][0:2] == '-r':
r = sys.argv[3][2:].lstrip().split(':')
if not getrevlimit() is None:
r[1] = getrevlimit()
if writedump(url, r[0], r[1]):
ret = 0
else:
ret = 1
sys.exit(ret)