diff --git a/dev-docs/pmc-chair.adoc b/dev-docs/pmc-chair.adoc index 9c0c9b0e5f44..f0b0393325cc 100644 --- a/dev-docs/pmc-chair.adoc +++ b/dev-docs/pmc-chair.adoc @@ -48,7 +48,7 @@ Once their ICLA is on file, the Infra team will set up their account and grant t If the person already has an ICLA on file, anyone on the PMC can go to the https://whimsy.apache.org/roster/committee/lucene[Lucene roster in Whimsy] and add the person. -See also http://apache.org/dev/pmc.html#noncommitter for details on the process. +See also https://apache.org/dev/pmc.html#noncommitter for details on the process. === Jira and Confluence Permissions @@ -70,7 +70,7 @@ The board will not respond. After 72 hours, check that the mail appears in the B Once the 72 hours has passed, the Chair can go to the https://whimsy.apache.org/roster/committee/lucene[Lucene roster in Whimsy] and change the person from a Committer to a member of the PMC. -See also: http://www.apache.org/dev/pmc.html#newpmc. +See also: https://www.apache.org/dev/pmc.html#newpmc. == Security Issues diff --git a/dev-tools/doap/solr.rdf b/dev-tools/doap/solr.rdf index 899f036a1f39..878a693dc6f0 100644 --- a/dev-tools/doap/solr.rdf +++ b/dev-tools/doap/solr.rdf @@ -27,7 +27,7 @@ Note that the canonical URL may redirect to other non-canonical locations. --> - + 2006-01-17 Apache Solr diff --git a/dev-tools/scripts/README.md b/dev-tools/scripts/README.md index 94e696451775..8eb3169b305a 100644 --- a/dev-tools/scripts/README.md +++ b/dev-tools/scripts/README.md @@ -92,25 +92,6 @@ of the other tools in this folder. Example usage for a Release Manager: python3 -u dev-tools/scripts/buildAndPushRelease.py --push-local /tmp/releases/6.0.1 --sign 6E68DA61 --rc-num 1 -### addBackcompatIndexes.py - - usage: addBackcompatIndexes.py [-h] [--force] [--no-cleanup] [--temp-dir DIR] - version - - Add backcompat index and test for new version. See: - http://wiki.apache.org/lucene-java/ReleaseTodo#Generate_Backcompat_Indexes - - positional arguments: - version Version to add, of the form X.Y.Z - - optional arguments: - -h, --help show this help message and exit - --force Redownload the version and rebuild, even if it already - exists - --no-cleanup Do not cleanup the built indexes, so that they can be reused - for adding to another branch - --temp-dir DIR Temp directory to build backcompat indexes within - ### addVersion.py usage: addVersion.py [-h] version diff --git a/dev-tools/scripts/addBackcompatIndexes.py b/dev-tools/scripts/addBackcompatIndexes.py deleted file mode 100755 index 289839bc7f90..000000000000 --- a/dev-tools/scripts/addBackcompatIndexes.py +++ /dev/null @@ -1,272 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Licensed to the Apache Software Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -# For usage information, see: -# -# http://wiki.apache.org/lucene-java/ReleaseTodo#Generate_Backcompat_Indexes - - -import os -import sys -sys.path.append(os.path.dirname(__file__)) -import scriptutil - -import argparse -import urllib.error -import urllib.request -import re -import shutil - -def create_and_add_index(source, indextype, index_version, current_version, temp_dir): - if not current_version.is_back_compat_with(index_version): - prefix = 'unsupported' - else: - prefix = { - 'cfs': 'index', - 'nocfs': 'index', - 'sorted': 'sorted', - 'moreterms': 'moreterms', - 'dvupdates': 'dvupdates', - 'emptyIndex': 'empty' - }[indextype] - if indextype in ('cfs', 'nocfs'): - dirname = 'index.%s' % indextype - filename = '%s.%s-%s.zip' % (prefix, index_version, indextype) - else: - dirname = indextype - filename = '%s.%s.zip' % (prefix, index_version) - - print(' creating %s...' % filename, end='', flush=True) - module = 'backward-codecs' - index_dir = os.path.join('lucene', module, 'src/test/org/apache/lucene/backward_index') - test_file = os.path.join(index_dir, filename) - if os.path.exists(os.path.join(index_dir, filename)): - print('uptodate') - return - - test = { - 'cfs': 'testCreateCFS', - 'nocfs': 'testCreateNoCFS', - 'sorted': 'testCreateSortedIndex', - 'moreterms': 'testCreateMoreTermsIndex', - 'dvupdates': 'testCreateIndexWithDocValuesUpdates', - 'emptyIndex': 'testCreateEmptyIndex' - }[indextype] - ant_args = ' '.join([ - '-Dtests.bwcdir=%s' % temp_dir, - '-Dtests.codec=default', - '-Dtests.useSecurityManager=false', - '-Dtestcase=TestBackwardsCompatibility', - '-Dtestmethod=%s' % test - ]) - base_dir = os.getcwd() - bc_index_dir = os.path.join(temp_dir, dirname) - bc_index_file = os.path.join(bc_index_dir, filename) - - if os.path.exists(bc_index_file): - print('alreadyexists') - else: - if os.path.exists(bc_index_dir): - shutil.rmtree(bc_index_dir) - os.chdir(os.path.join(source, module)) - scriptutil.run('ant test %s' % ant_args) - os.chdir(bc_index_dir) - scriptutil.run('zip %s *' % filename) - print('done') - - print(' adding %s...' % filename, end='', flush=True) - scriptutil.run('cp %s %s' % (bc_index_file, os.path.join(base_dir, index_dir))) - os.chdir(base_dir) - scriptutil.run('rm -rf %s' % bc_index_dir) - print('done') - -def update_backcompat_tests(types, index_version, current_version): - print(' adding new indexes %s to backcompat tests...' 
% types, end='', flush=True) - module = 'lucene/backward-codecs' - filename = '%s/src/test/org/apache/lucene/backward_index/TestBackwardsCompatibility.java' % module - if not current_version.is_back_compat_with(index_version): - matcher = re.compile(r'final String\[\] unsupportedNames = {|};') - elif 'sorted' in types: - matcher = re.compile(r'static final String\[\] oldSortedNames = {|};') - else: - matcher = re.compile(r'static final String\[\] oldNames = {|};') - - strip_dash_suffix_re = re.compile(r'-.*') - - def find_version(x): - x = x.strip() - x = re.sub(strip_dash_suffix_re, '', x) # remove the -suffix if any - return scriptutil.Version.parse(x) - - class Edit(object): - start = None - def __call__(self, buffer, match, line): - if self.start: - # find where this version should exist - i = len(buffer) - 1 - previous_version_exists = not ('};' in line and buffer[-1].strip().endswith("{")) - if previous_version_exists: # Only look if there is a version here - v = find_version(buffer[i]) - while i >= self.start and v.on_or_after(index_version): - i -= 1 - v = find_version(buffer[i]) - i += 1 # readjust since we skipped past by 1 - - # unfortunately python doesn't have a range remove from list... - # here we want to remove any previous references to the version we are adding - while i < len(buffer) and index_version.on_or_after(find_version(buffer[i])): - buffer.pop(i) - - if i == len(buffer) and previous_version_exists and not buffer[-1].strip().endswith(","): - # add comma - buffer[-1] = buffer[-1].rstrip() + ",\n" - - if previous_version_exists: - last = buffer[-1] - spaces = ' ' * (len(last) - len(last.lstrip())) - else: - spaces = ' ' - for (j, t) in enumerate(types): - if t == 'sorted': - newline = spaces + ('"sorted.%s"') % index_version - else: - newline = spaces + ('"%s-%s"' % (index_version, t)) - if j < len(types) - 1 or i < len(buffer): - newline += ',' - buffer.insert(i, newline + '\n') - i += 1 - - buffer.append(line) - return True - - if 'Names = {' in line: - self.start = len(buffer) # location of first index name - buffer.append(line) - return False - - changed = scriptutil.update_file(filename, matcher, Edit()) - print('done' if changed else 'uptodate') - -def check_backcompat_tests(): - print(' checking backcompat tests...', end='', flush=True) - scriptutil.run('./gradlew -p lucene/backward-codecs test --tests TestBackwardsCompatibility') - print('ok') - -def download_from_mirror(version, remotename, localname): - url = 'http://apache.cs.utah.edu/lucene/java/%s/%s' % (version, remotename) - try: - urllib.request.urlretrieve(url, localname) - return True - except urllib.error.URLError as e: - if e.code == 404: - return False - raise e - -def download_from_archives(version, remotename, localname): - url = 'http://archive.apache.org/dist/lucene/java/%s/%s' % (version, remotename) - try: - urllib.request.urlretrieve(url, localname) - return True - except urllib.error.URLError as e: - if e.code == 404: - return False - raise e - -def download_release(version, temp_dir, force): - print(' downloading %s source release...' 
% version, end='', flush=True) - source = os.path.join(temp_dir, 'lucene-%s' % version) - if os.path.exists(source): - if force: - shutil.rmtree(source) - else: - print('uptodate') - return source - - filename = 'lucene-%s-src.tgz' % version - source_tgz = os.path.join(temp_dir, filename) - if not download_from_mirror(version, filename, source_tgz) and \ - not download_from_archives(version, filename, source_tgz): - raise Exception('Could not find version %s in apache mirror or archives' % version) - - olddir = os.getcwd() - os.chdir(temp_dir) - scriptutil.run('tar -xvzf %s' % source_tgz) - os.chdir(olddir) - print('done') - return source - -def read_config(): - parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, - description='''\ -Add backcompat index and test for new version. See: -http://wiki.apache.org/lucene-java/ReleaseTodo#Generate_Backcompat_Indexes -''') - parser.add_argument('--force', action='store_true', default=False, - help='Redownload the version and rebuild, even if it already exists') - parser.add_argument('--no-cleanup', dest='cleanup', action='store_false', default=True, - help='Do not cleanup the built indexes, so that they can be reused ' + - 'for adding to another branch') - parser.add_argument('--temp-dir', metavar='DIR', default='/tmp/lucenebwc', - help='Temp directory to build backcompat indexes within') - parser.add_argument('version', type=scriptutil.Version.parse, - help='Version to add, of the form X.Y.Z') - c = parser.parse_args() - - return c - -def main(): - c = read_config() - if not os.path.exists(c.temp_dir): - os.makedirs(c.temp_dir) - - print('\nCreating backwards compatibility indexes') - source = download_release(c.version, c.temp_dir, c.force) - current_version = scriptutil.Version.parse(scriptutil.find_current_version()) - create_and_add_index(source, 'cfs', c.version, current_version, c.temp_dir) - create_and_add_index(source, 'nocfs', c.version, current_version, c.temp_dir) - should_make_sorted = current_version.is_back_compat_with(c.version) \ - and (c.version.major > 6 or (c.version.major == 6 and c.version.minor >= 2)) - if should_make_sorted: - create_and_add_index(source, 'sorted', c.version, current_version, c.temp_dir) - if c.version.minor == 0 and c.version.bugfix == 0 and c.version.major < current_version.major: - create_and_add_index(source, 'moreterms', c.version, current_version, c.temp_dir) - create_and_add_index(source, 'dvupdates', c.version, current_version, c.temp_dir) - create_and_add_index(source, 'emptyIndex', c.version, current_version, c.temp_dir) - print ('\nMANUAL UPDATE REQUIRED: edit TestBackwardsCompatibility to enable moreterms, dvupdates, and empty index testing') - - print('\nAdding backwards compatibility tests') - update_backcompat_tests(['cfs', 'nocfs'], c.version, current_version) - if should_make_sorted: - update_backcompat_tests(['sorted'], c.version, current_version) - - print('\nTesting changes') - check_backcompat_tests() - - if c.cleanup: - print('\nCleaning up') - print(' deleting %s...' 
% c.temp_dir, end='', flush=True) - shutil.rmtree(c.temp_dir) - print('done') - - print() - -if __name__ == '__main__': - try: - main() - except KeyboardInterrupt: - print('\nRecieved Ctrl-C, exiting early') diff --git a/dev-tools/scripts/releaseWizard.py b/dev-tools/scripts/releaseWizard.py index abd670eaa743..8ec5a4979508 100755 --- a/dev-tools/scripts/releaseWizard.py +++ b/dev-tools/scripts/releaseWizard.py @@ -1158,7 +1158,7 @@ def configure_pgp(gpg_todo): return False if length < 4096: print("Your key length is < 4096, Please generate a stronger key.") - print("Alternatively, follow instructions in http://www.apache.org/dev/release-signing.html#note") + print("Alternatively, follow instructions in https://infra.apache.org/release-signing.html#note") if not ask_yes_no("Have you configured your gpg to avoid SHA-1?"): print("Please either generate a strong key or reconfigure your client") return False @@ -1181,7 +1181,7 @@ def configure_pgp(gpg_todo): if apache_sigs < 1: print(textwrap.dedent("""\ Your key is not signed by any other committer. - Please review http://www.apache.org/dev/openpgp.html#apache-wot + Please review https://infra.apache.org/openpgp.html#apache-wot and make sure to get your key signed until next time. You may want to run 'gpg --refresh-keys' to refresh your keychain.""")) uses_apacheid = is_code_signing_key = False @@ -1191,9 +1191,9 @@ def configure_pgp(gpg_todo): if 'CODE SIGNING KEY' in line.upper(): is_code_signing_key = True if not uses_apacheid: - print("WARNING: Your key should use your apache-id email address, see http://www.apache.org/dev/release-signing.html#user-id") + print("WARNING: Your key should use your apache-id email address, see https://infra.apache.org/release-signing.html#user-id") if not is_code_signing_key: - print("WARNING: You code signing key should be labeled 'CODE SIGNING KEY', see http://www.apache.org/dev/release-signing.html#key-comment") + print("WARNING: You code signing key should be labeled 'CODE SIGNING KEY', see https://infra.apache.org/release-signing.html#key-comment") except Exception as e: print("Could not check signatures of your key: %s" % e) diff --git a/dev-tools/scripts/releaseWizard.yaml b/dev-tools/scripts/releaseWizard.yaml index b00754f0be95..29db1bc0ca55 100644 --- a/dev-tools/scripts/releaseWizard.yaml +++ b/dev-tools/scripts/releaseWizard.yaml @@ -280,10 +280,10 @@ groups: voting rules, create a PGP/GPG key for use with signing and more. Please familiarise yourself with the resources listed below. links: - - http://www.apache.org/dev/release-publishing.html - - http://www.apache.org/legal/release-policy.html - - http://www.apache.org/dev/release-signing.html - - https://wiki.apache.org/lucene-java/ReleaseTodo + - https://infra.apache.org/release-publishing.html + - https://www.apache.org/legal/release-policy.html + - https://infra.apache.org/release-signing.html + - https://cwiki.apache.org/confluence/display/LUCENE/ReleaseTodo - !Todo id: tools title: Necessary tools are installed @@ -321,8 +321,8 @@ groups: fingerprint for the key. 
function: configure_pgp links: - - http://www.apache.org/dev/release-signing.html - - http://www.apache.org/dev/openpgp.html#apache-wot + - https://infra.apache.org/release-signing.html + - https://infra.apache.org/openpgp.html#apache-wot - https://id.apache.org - https://dist.apache.org/repos/dist/release/lucene/KEYS - !Todo @@ -1041,10 +1041,9 @@ groups: description: | Ensure your refrigerator has at least 2 beers - the svn import operation can take a while, depending on your upload bandwidth. We'll publish this directly to the production tree. - At the end of the task, the two links below shall work. + At the end of the task, the link below shall work. links: - - http://lucene.apache.org/core/{{ version }} - - http://lucene.apache.org/solr/{{ version }} + - https://solr.apache.org/{{ version }} vars: release_tag: releases/lucene-solr/{{ release_version }} version: "{{ release_version_major }}_{{ release_version_minor }}_{{ release_version_bugfix }}" @@ -1250,15 +1249,13 @@ groups: post_description: | Wait a few minutes for the build to happen. You can follow the site build at https://ci2.apache.org/#/builders/3 - Verify on https://lucene.apache.org that the site is OK. + Verify on https://solr.apache.org that the site is OK. - You can now also verify that http://lucene.apache.org/solr/api/solr-core/ and http://lucene.apache.org/core/api/core/ - redirects to the latest version + You can now also verify that https://solr.apache.org/api/solr-core/ redirects to the latest version links: - https://ci2.apache.org/#/builders/3 - - https://lucene.apache.org - - http://lucene.apache.org/solr/api/solr-core/ - - http://lucene.apache.org/core/api/core/ + - https://solr.apache.org + - https://solr.apache.org/api/solr-core/ - !Todo id: update_doap title: Update the DOAP files @@ -1522,114 +1519,6 @@ groups: - !Command cmd: git add -u . && git commit -m "Add next bugfix version {{ next_version }}" && git push logfile: commit-stable.log - - !Todo - id: backcompat_release - title: Generate Backcompat Indexes for release branch - description: | - After each version of Lucene is released, compressed CFS, non-CFS, and sorted indexes created with - the newly released version are added to `lucene/backwards-codecs/src/test/org/apache/lucene/index/`, - for use in testing backward index compatibility via org.apache.lucene.index.TestBackwardsCompatibility, - which is also located under the `backwards-codecs/` module. There are also three indexes created only - with major Lucene versions: moreterms, empty, and dvupdates. These indexes are created via methods - on `TestBackwardsCompatibility` itself - see comments in the source for more information. - - There is a script (`dev-tools/scripts/addBackcompatIndexes.py`) that automates most of the process. - It downloads the source for the specified release; generates indexes for the current release using - `TestBackwardsCompatibility`; compresses the indexes and places them in the correct place in the source - tree; modifies TestBackwardsCompatibility.java to include the generated indexes in the list of indexes - to test; and then runs `TestBackwardsCompatibility`. - - In this and the next two steps we'll guide you through using this tool on each of the branches. 
- depends: - - increment_release_version - vars: - temp_dir: "{{ [release_folder, 'backcompat'] | path_join }}" - commands: !Commands - root_folder: '{{ git_checkout_folder }}' - commands_text: Run these commands to add back-compat indices to release branch - commands: - - !Command - cmd: git checkout {{ release_branch }} && git pull --ff-only && git clean -df && git checkout -- . - tee: true - logfile: checkout.log - - !Command - cmd: "{{ gradle_cmd }} clean" - - !Command - cmd: python3 -u dev-tools/scripts/addBackcompatIndexes.py --no-cleanup --temp-dir {{ temp_dir }} {{ release_version }} && git add lucene/backward-codecs/src/test/org/apache/lucene/index/ - logfile: add-backcompat.log - - !Command - cmd: git diff --staged - comment: Check the git diff before committing - tee: true - - !Command - cmd: git commit -m "Add back-compat indices for {{ release_version }}" && git push - logfile: commit.log - - !Todo - id: backcompat_stable - title: Generate Backcompat Indexes for stable branch - description: | - Now generate back-compat for stable branch ({{ stable_branch }}) - depends: - - increment_release_version - vars: - temp_dir: "{{ [release_folder, 'backcompat'] | path_join }}" - commands: !Commands - root_folder: '{{ git_checkout_folder }}' - commands_text: Run these commands to add back-compat indices to {{ stable_branch }} - commands: - - !Command - cmd: git checkout {{ stable_branch }} && git pull --ff-only && git clean -df && git checkout -- . - tee: true - logfile: checkout.log - - !Command - cmd: "{{ gradle_cmd }} clean" - - !Command - cmd: python3 -u dev-tools/scripts/addBackcompatIndexes.py --no-cleanup --temp-dir {{ temp_dir }} {{ release_version }} && git add lucene/backward-codecs/src/test/org/apache/lucene/index/ - logfile: add-backcompat.log - - !Command - cmd: git diff --staged - comment: Check the git diff before committing - tee: true - - !Command - cmd: git commit -m "Add back-compat indices for {{ release_version }}" && git push - logfile: commit.log - - !Todo - id: backcompat_master - title: Generate Backcompat Indexes for unstable branch - description: | - Now generate back-compat for unstable (master) branch. - Note that this time we do not specify `--no-cleanup` meaning the tmp folder will be deleted - depends: - - increment_release_version - vars: - temp_dir: "{{ [release_folder, 'backcompat'] | path_join }}" - version: "{{ set_java_home(master_version) }}" - commands: !Commands - root_folder: '{{ git_checkout_folder }}' - commands_text: Run these commands to add back-compat indices to master - commands: - - !Command - cmd: git checkout master && git pull --ff-only && git clean -df && git checkout -- . - tee: true - logfile: checkout.log - - !Command - cmd: "{{ gradle_cmd }} clean" - - !Command - cmd: python3 -u dev-tools/scripts/addBackcompatIndexes.py --temp-dir {{ temp_dir }} {{ release_version }} && git add lucene/backward-codecs/src/test/org/apache/lucene/index/ - logfile: add-backcompat.log - - !Command - cmd: git diff --staged - comment: Check the git diff before committing - tee: true - - !Command - cmd: git commit -m "Add back-compat indices for {{ release_version }}" && git push - logfile: commit.log - post_description: | - When doing a major version release, eg. 8.0.0, you might also need to reenable some - backward compatibility tests for corner cases. To find them, run grep -r assume - lucene/backward-codecs/, which should find tests that have been disabled on master - because there was no released Lucene version to test against. 
- {{ set_java_home(release_version) }} - !Todo id: jira_release title: Mark version as released in JIRA diff --git a/gradle/maven/defaults-maven.gradle b/gradle/maven/defaults-maven.gradle index 9e41b38ca7f2..0ad4b9e8294f 100644 --- a/gradle/maven/defaults-maven.gradle +++ b/gradle/maven/defaults-maven.gradle @@ -99,7 +99,7 @@ configure(subprojects.findAll { it.path in rootProject.published }) { prj -> licenses { license { name = 'Apache 2' - url = 'http://www.apache.org/licenses/LICENSE-2.0.txt' + url = 'https://apache.org/licenses/LICENSE-2.0.txt' } } diff --git a/solr/README.md b/solr/README.md index 2401e2ba6eaa..1a9b2570bbb2 100644 --- a/solr/README.md +++ b/solr/README.md @@ -148,7 +148,7 @@ example/ dist/solr--XX.jar The Apache Solr libraries. To compile Apache Solr Plugins, one or more of these will be required. The core library is - required at a minimum. (see http://wiki.apache.org/solr/SolrPlugins + required at a minimum. (see https://solr.apache.org/guide/solr-plugins.html for more information). docs/index.html @@ -195,7 +195,7 @@ possession, use, and/or re-export to another country, of encryption software. BEFORE using any encryption software, please check your country's laws, regulations and policies concerning the import, possession, or use, and re-export of encryption software, to -see if this is permitted. See for more +see if this is permitted. See for more information. The U.S. Government Department of Commerce, Bureau of Industry and @@ -213,4 +213,4 @@ software: Apache Solr uses the Apache Tika which uses the Bouncy Castle generic encryption libraries for extracting text content and metadata from encrypted PDF files. -See http://www.bouncycastle.org/ for more details on Bouncy Castle. +See https://www.bouncycastle.org/ for more details on Bouncy Castle. diff --git a/solr/bin/install_solr_service.sh b/solr/bin/install_solr_service.sh index 6cfb6a578e3e..fddeebbc31a9 100755 --- a/solr/bin/install_solr_service.sh +++ b/solr/bin/install_solr_service.sh @@ -79,7 +79,7 @@ for command in "grep -E \"^NAME=\" /etc/os-release" \ distro=SUSE elif [[ ${distro_string,,} == *"darwin"* ]]; then echo "Sorry, this script does not support macOS. You'll need to setup Solr as a service manually using the documentation provided in the Solr Reference Guide." - echo "You could also try installing via Homebrew (http://brew.sh/), e.g. brew install solr" + echo "You could also try installing via Homebrew (https://brew.sh/), e.g. 
brew install solr" exit 1 fi if [[ $distro ]] ; then break ; fi diff --git a/solr/bin/post b/solr/bin/post index 4c629d105937..25c30ea7cf83 100755 --- a/solr/bin/post +++ b/solr/bin/post @@ -94,7 +94,7 @@ function print_usage() { echo "* XML files: $THIS_SCRIPT -c records article*.xml" echo "* CSV file: $THIS_SCRIPT -c signals LATEST-signals.csv" echo "* Directory of files: $THIS_SCRIPT -c myfiles ~/Documents" - echo "* Web crawl: $THIS_SCRIPT -c gettingstarted http://lucene.apache.org/solr -recursive 1 -delay 1" + echo "* Web crawl: $THIS_SCRIPT -c gettingstarted https://solr.apache.org/ -recursive 1 -delay 1" echo "* Standard input (stdin): echo '{"commit": {}}' | $THIS_SCRIPT -c my_collection -type application/json -out yes -d" echo "* Data as string: $THIS_SCRIPT -c signals -type text/csv -out yes -d $'id,value\n1,0.47'" echo "" @@ -115,7 +115,7 @@ ARGS=() while [ $# -gt 0 ]; do # TODO: natively handle the optional parameters to SPT - # but for now they can be specified as bin/post -c collection-name delay=5 http://lucidworks.com + # but for now they can be specified as bin/post -c collection-name delay=5 https://lucidworks.com if [[ -d "$1" ]]; then # Directory diff --git a/solr/bin/solr b/solr/bin/solr index f28fb6c46697..be107b48dbbe 100755 --- a/solr/bin/solr +++ b/solr/bin/solr @@ -2034,7 +2034,7 @@ if [ "$GC_LOG_OPTS" != "" ]; then GC_LOG_OPTS+=($JAVA8_GC_LOG_FILE_OPTS) fi else - # http://openjdk.java.net/jeps/158 + # https://openjdk.java.net/jeps/158 for i in "${!GC_LOG_OPTS[@]}"; do # for simplicity, we only look at the prefix '-Xlog:gc' diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java index 575d9082b9b4..9fcf5caadac1 100644 --- a/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java +++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/update/processor/OpenNLPExtractNamedEntitiesUpdateProcessorFactory.java @@ -74,7 +74,7 @@ * </fieldType> * * - *

See the <a href="http://opennlp.apache.org/models.html">OpenNLP website</a>
+ * See the <a href="https://opennlp.apache.org/models.html">OpenNLP website</a>
 * for information on downloading pre-trained models.

* * Note that in order to use model files larger than 1MB on SolrCloud, diff --git a/solr/contrib/extraction/README.md b/solr/contrib/extraction/README.md index f3fdb5c7fb5b..1e425ca09957 100644 --- a/solr/contrib/extraction/README.md +++ b/solr/contrib/extraction/README.md @@ -7,11 +7,11 @@ Introduction Apache Solr Extraction provides a means for extracting and indexing content contained in "rich" documents, such as Microsoft Word, Adobe PDF, etc. (Each name is a trademark of their respective owners) This contrib module uses Apache Tika to extract content and metadata from the files, which can then be indexed. For more information, -see http://wiki.apache.org/solr/ExtractingRequestHandler +see https://solr.apache.org/guide/uploading-data-with-solr-cell-using-apache-tika.html Getting Started --------------- You will need Solr up and running. Then, simply add the extraction JAR file, plus the Tika dependencies (in the ./lib folder) -to your Solr Home lib directory. See http://wiki.apache.org/solr/ExtractingRequestHandler for more details on hooking it in +to your Solr Home lib directory. See https://solr.apache.org/guide/uploading-data-with-solr-cell-using-apache-tika.html for more details on hooking it in and configuring. diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java index f74e7b264c52..75a4a25784e8 100644 --- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java +++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java @@ -191,7 +191,7 @@ public void load(SolrQueryRequest req, SolrQueryResponse rsp, if (xpathExpr != null) { Matcher matcher = PARSER.parse(xpathExpr); - serializer.startDocument();//The MatchingContentHandler does not invoke startDocument. See http://tika.markmail.org/message/kknu3hw7argwiqin + serializer.startDocument();//The MatchingContentHandler does not invoke startDocument. See https://lists.apache.org/thread.html/5ec63e104e564a2363e45f74d5aced6520b7d32b4b625762ef56cb86%401226775505%40%3Cdev.tika.apache.org%3E parsingHandler = new MatchingContentHandler(serializer, matcher); } else { parsingHandler = serializer; diff --git a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java index 8b4161ae8b0f..e1e6fa3a5416 100644 --- a/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java +++ b/solr/contrib/langid/src/java/org/apache/solr/update/processor/LangDetectLanguageIdentifierUpdateProcessor.java @@ -34,9 +34,10 @@ import org.slf4j.LoggerFactory; /** - * Identifies the language of a set of input fields using http://code.google.com/p/language-detection + * Identifies the language of a set of input fields using https://github.com/shuyo/language-detection *

- * See http://wiki.apache.org/solr/LanguageDetection + * See Detecting Languages During + * Indexing in the Solr Ref Guide * @since 3.5 */ public class LangDetectLanguageIdentifierUpdateProcessor extends LanguageIdentifierUpdateProcessor { diff --git a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java index 67b60090536a..be091a9d74e2 100644 --- a/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java +++ b/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java @@ -715,7 +715,7 @@ private static List toListOfStringDests(List raw) { @Override public String getDescription() { - return "Lucene Index Browser. Inspired and modeled after Luke: http://www.getopt.org/luke/"; + return "Lucene Index Browser. Inspired and modeled after Luke: https://code.google.com/archive/p/luke/"; } @Override diff --git a/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java b/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java index 202696f36526..2b44365bbc9d 100644 --- a/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java +++ b/solr/core/src/java/org/apache/solr/servlet/CheckLoggingConfiguration.java @@ -29,7 +29,7 @@ static void check() { throw new NoClassDefFoundError("Failed to initialize Apache Solr: " +"Could not find necessary SLF4j logging jars. If using Jetty, the SLF4j logging jars need to go in " +"the jetty lib/ext directory. For other containers, the corresponding directory should be used. " - +"For more information, see: http://wiki.apache.org/solr/SolrLogging"); + +"For more information, see: https://cwiki.apache.org/confluence/display/solr/SolrLogging#SolrLogging-UsingtheexampleloggingsetupincontainersotherthanJetty"); } } diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java index 2592950c9967..96cc66dd2c88 100644 --- a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java +++ b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java @@ -213,7 +213,7 @@ private SolrQueryRequest buildRequestFrom(SolrCore core, SolrParams params, Coll strs = params.getParams( CommonParams.STREAM_FILE ); if( strs != null ) { if( !enableRemoteStreams ) { - throw new SolrException( ErrorCode.BAD_REQUEST, "Remote Streaming is disabled. See http://lucene.apache.org/solr/guide/requestdispatcher-in-solrconfig.html for help" ); + throw new SolrException( ErrorCode.BAD_REQUEST, "Remote Streaming is disabled. See https://solr.apache.org/guide/requestdispatcher-in-solrconfig.html for help" ); } for( final String file : strs ) { ContentStreamBase stream = new ContentStreamBase.FileStream( new File(file) ); @@ -228,7 +228,7 @@ private SolrQueryRequest buildRequestFrom(SolrCore core, SolrParams params, Coll strs = params.getParams( CommonParams.STREAM_BODY ); if( strs != null ) { if( !enableStreamBody ) { - throw new SolrException( ErrorCode.BAD_REQUEST, "Stream Body is disabled. See http://lucene.apache.org/solr/guide/requestdispatcher-in-solrconfig.html for help" ); + throw new SolrException( ErrorCode.BAD_REQUEST, "Stream Body is disabled. 
See https://solr.apache.org/guide/requestdispatcher-in-solrconfig.html for help" ); } for( final String body : strs ) { ContentStreamBase stream = new ContentStreamBase.StringStream( body ); diff --git a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java index 54ec6b2999ee..d38ec52b99af 100644 --- a/solr/core/src/java/org/apache/solr/util/SimplePostTool.java +++ b/solr/core/src/java/org/apache/solr/util/SimplePostTool.java @@ -1137,7 +1137,7 @@ public PageFetcherResult readPageFromUrl(URL u) { } res.httpStatus = 404; HttpURLConnection conn = (HttpURLConnection) u.openConnection(); - conn.setRequestProperty("User-Agent", "SimplePostTool-crawler/"+VERSION_OF_THIS_TOOL+" (http://lucene.apache.org/solr/)"); + conn.setRequestProperty("User-Agent", "SimplePostTool-crawler/"+VERSION_OF_THIS_TOOL+" (https://solr.apache.org/)"); conn.setRequestProperty("Accept-Encoding", "gzip, deflate"); conn.connect(); res.httpStatus = conn.getResponseCode(); diff --git a/solr/documentation/src/markdown/index.template.md b/solr/documentation/src/markdown/index.template.md index e1d11be3d876..e81c10bdb9bb 100644 --- a/solr/documentation/src/markdown/index.template.md +++ b/solr/documentation/src/markdown/index.template.md @@ -1,5 +1,5 @@

- + Solr
diff --git a/solr/documentation/src/markdown/online-link.template.md b/solr/documentation/src/markdown/online-link.template.md index 345f1349859a..64f208719c9f 100644 --- a/solr/documentation/src/markdown/online-link.template.md +++ b/solr/documentation/src/markdown/online-link.template.md @@ -1,5 +1,5 @@
- + Solr
diff --git a/solr/solr-ref-guide/src/enabling-ssl.adoc b/solr/solr-ref-guide/src/enabling-ssl.adoc index bf681b20db8f..95e9cd82242b 100644 --- a/solr/solr-ref-guide/src/enabling-ssl.adoc +++ b/solr/solr-ref-guide/src/enabling-ssl.adoc @@ -300,7 +300,7 @@ C:\> bin\solr.cmd -cloud -s cloud\node1 -z server1:2181,server2:2181,server3:218 [IMPORTANT] ==== -curl on OS X Mavericks (10.9) has degraded SSL support. For more information and workarounds to allow one-way SSL, see http://curl.haxx.se/mail/archive-2013-10/0036.html. curl on OS X Yosemite (10.10) is improved - 2-way SSL is possible - see http://curl.haxx.se/mail/archive-2014-10/0053.html. +curl on OS X Mavericks (10.9) has degraded SSL support. For more information and workarounds to allow one-way SSL, see https://curl.se/mail/archive-2013-10/0036.html. curl on OS X Yosemite (10.10) is improved - 2-way SSL is possible - see https://curl.se/mail/archive-2014-10/0053.html. The curl commands in the following sections will not work with the system `curl` on OS X Yosemite (10.10). Instead, the certificate supplied with the `-E` parameter must be in PKCS12 format, and the file supplied with the `--cacert` parameter must contain only the CA certificate, and no key (see <> for instructions on creating this file): @@ -309,7 +309,7 @@ $ curl -E solr-ssl.keystore.p12:secret --cacert solr-ssl.cacert.pem ... ==== -NOTE: If your operating system does not include curl, you can download binaries here: http://curl.haxx.se/download.html +NOTE: If your operating system does not include curl, you can download binaries here: https://curl.se/download.html === Create a SolrCloud Collection using bin/solr diff --git a/solr/solr-ref-guide/src/filter-descriptions.adoc b/solr/solr-ref-guide/src/filter-descriptions.adoc index 2c059d202a0a..da285d5c3277 100644 --- a/solr/solr-ref-guide/src/filter-descriptions.adoc +++ b/solr/solr-ref-guide/src/filter-descriptions.adoc @@ -2738,7 +2738,7 @@ $ => DIGIT . => DIGIT \u002C => DIGIT -# Don't split on ZWJ: http://en.wikipedia.org/wiki/Zero-width_joiner +# Don't split on ZWJ: https://en.wikipedia.org/wiki/Zero-width_joiner \u200D => ALPHANUM ---- diff --git a/solr/solr-ref-guide/src/solr-jdbc-python-jython.adoc b/solr/solr-ref-guide/src/solr-jdbc-python-jython.adoc index 55f37de7414a..b75157f4d7a1 100644 --- a/solr/solr-ref-guide/src/solr-jdbc-python-jython.adoc +++ b/solr/solr-ref-guide/src/solr-jdbc-python-jython.adoc @@ -84,7 +84,7 @@ jython [solr_java_native.py | solr_zxjdbc.py] ---- #!/usr/bin/env jython -# http://www.jython.org/jythonbook/en/1.0/DatabasesAndJython.html +# https://jython.readthedocs.io/en/latest/DatabasesAndJython/ # https://wiki.python.org/jython/DatabaseExamples#SQLite_using_JDBC import sys @@ -118,7 +118,7 @@ if __name__ == '__main__': ---- #!/usr/bin/env jython -# http://www.jython.org/jythonbook/en/1.0/DatabasesAndJython.html +# https://jython.readthedocs.io/en/latest/DatabasesAndJython/ # https://wiki.python.org/jython/DatabaseExamples#SQLite_using_ziclix import sys