What is the best way to invoke Checkstyle from within Bazel? - java

I'm trying to add support for invoking Checkstyle as part of our Bazel build. I've seen some code using Extra Actions to accomplish that, but I'm hoping to avoid that approach and get it to work with pure Skylark code. I managed to use the following (awful) genrule to get the JVM to execute Checkstyle on a set of source files, but I realize that's incredibly hacky:
native.genrule(
    name = name,
    srcs = srcs,
    outs = ["src_output.txt"],
    cmd = "$(JAVA) -Dconfig_loc=<full-config-loc-path> -classpath <path>/checkstyle-8.4-all.jar com.puppycrawl.tools.checkstyle.Main -c <config-file-path> -o $@ $(SRCS)",
    **kwargs
)
Any suggestions on how to do it the right way? I already have all the necessary JAR dependencies in our dependencies.bzl file, so I'd be happy to refer to those instead of the checkstyle-all JAR.

As discussed on IRC, this is the rule that I have been using (at the end of this post). I have a directory config/ that contains my Checkstyle config, suppressions and a license file, which are referenced here as default arguments. In your WORKSPACE you can pull in all the deps with a macro:
load("//tools:checkstyle.bzl", "checkstyle_repositories")
checkstyle_repositories()
In your BUILD files, import and use the rule with:
load("//tools:checkstyle.bzl", "checkstyle_test")
filegroup(
name = "java-srcs",
srcs = glob(["src/main/java/**/*.java"]),
)
checkstyle_test(
name = "check",
srcs = [
":java-srcs",
],
)
Then you can run it with bazel test //path/to/dir:check.
This rule does have the limitation that it takes the arguments on the command line, so for larger modules you will need to split up the file groups to avoid hitting the command-line length limit, e.g.
load("//tools:checkstyle.bzl", "checkstyle_test")
filegroup(
name = "java-foo-srcs",
srcs = glob(["src/main/java/foo/**/*.java"]),
)
filegroup(
name = "java-bar-srcs",
srcs = glob(["src/main/java/bar/**/*.java"]),
)
checkstyle_test(
name = "check-foo",
srcs = [
":java-foo-srcs",
],
)
checkstyle_test(
name = "check-bar",
srcs = [
":java-bar-srcs",
],
)
test_suite(
name = "check",
tests = [
":check-bar",
":check-foo",
],
)
If you have a BUILD file per package this will likely be unnecessary though; it's more of an issue if you are converting a large Maven module and keep a similar structure in your Bazel build files.
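If splitting the file groups by hand gets repetitive, the pattern above can also be wrapped in a small helper macro. This is only a sketch; the helper and its file are hypothetical and not part of checkstyle.bzl below:

# tools/checkstyle_suite.bzl (hypothetical helper)
load("//tools:checkstyle.bzl", "checkstyle_test")

def checkstyle_suite(name, groups, **kwargs):
    """Creates one checkstyle_test per named source group plus a test_suite over all of them."""
    tests = []
    for suffix, srcs in groups.items():
        test_name = "%s-%s" % (name, suffix)
        checkstyle_test(name = test_name, srcs = srcs, **kwargs)
        tests.append(":" + test_name)
    native.test_suite(name = name, tests = tests)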
tools/checkstyle.bzl
load("//tools/gerrit:maven_jar.bzl", "maven_jar")
def checkstyle_repositories(
omit = [],
versions = {
"antlr_antlr": "2.7.7",
"org_antlr_antlr4_runtime": "4.5.1-1",
"com_puppycrawl_tools_checkstyle": "8.2",
"commons_beanutils_commons_beanutils": "1.9.3",
"commons_cli_commons_cli": "1.4",
"commons_collections_commons_collections": "3.2.2",
"com_google_guava_guava23": "23.0",
"org_slf4j_slf4j_api": "1.7.7",
"org_slf4j_slf4j_jcl": "1.7.7",
}
):
if not "antlr_antlr" in omit:
maven_jar(
name = "antlr_antlr",
attach_source = False,
artifact = "antlr:antlr:" + versions["antlr_antlr"],
)
if not "org_antlr_antlr4_runtime" in omit:
maven_jar(
name = "org_antlr_antlr4_runtime",
artifact = "org.antlr:antlr4-runtime:" + versions["org_antlr_antlr4_runtime"],
)
if not "com_puppycrawl_tools_checkstyle" in omit:
maven_jar(
name = "com_puppycrawl_tools_checkstyle",
artifact = "com.puppycrawl.tools:checkstyle:" + versions["com_puppycrawl_tools_checkstyle"],
)
if not "commons_beanutils_commons_beanutils" in omit:
maven_jar(
name = "commons_beanutils_commons_beanutils",
artifact = "commons-beanutils:commons-beanutils:" + versions["commons_beanutils_commons_beanutils"],
)
if not "commons_cli_commons_cli" in omit:
maven_jar(
name = "commons_cli_commons_cli",
artifact = "commons-cli:commons-cli:" + versions["commons_cli_commons_cli"],
)
if not "commons_collections_commons_collections" in omit:
maven_jar(
name = "commons_collections_commons_collections",
artifact = "commons-collections:commons-collections:" + versions["commons_collections_commons_collections"],
)
if not "com_google_guava_guava23" in omit:
maven_jar(
name = "com_google_guava_guava23",
artifact = "com.google.guava:guava:" + versions["com_google_guava_guava23"],
)
if not "org_slf4j_slf4j_api" in omit:
maven_jar(
name = "org_slf4j_slf4j_api",
artifact = "org.slf4j:slf4j-api:" + versions["org_slf4j_slf4j_api"],
)
if not "org_slf4j_slf4j_jcl" in omit:
maven_jar(
name = "org_slf4j_slf4j_jcl",
artifact = "org.slf4j:jcl-over-slf4j:" + versions["org_slf4j_slf4j_jcl"],
)
def _checkstyle_test_impl(ctx):
    name = ctx.label.name
    srcs = ctx.files.srcs
    deps = ctx.files.deps
    config = ctx.file.config
    properties = ctx.file.properties
    suppressions = ctx.file.suppressions
    opts = ctx.attr.opts
    sopts = ctx.attr.string_opts

    # Build a colon-separated classpath from the Checkstyle jars plus any extra deps.
    classpath = ""
    add = False
    for file in ctx.files._classpath:
        if add:
            classpath += ":"
        add = True
        classpath += file.path
    for file in ctx.files.deps:
        classpath += ":" + file.path

    args = ""
    inputs = []
    if config:
        args += " -c %s" % config.path
        inputs.append(config)
    if properties:
        args += " -p %s" % properties.path
        inputs.append(properties)
    if suppressions:
        inputs.append(suppressions)

    # The test executable is simply a shell script that invokes the Checkstyle CLI.
    cmd = " ".join(
        ["java -cp %s com.puppycrawl.tools.checkstyle.Main" % classpath] +
        [args] +
        ["--%s" % x for x in opts] +
        ["--%s %s" % (k, sopts[k]) for k in sopts] +
        [x.path for x in srcs]
    )
    ctx.file_action(
        output = ctx.outputs.executable,
        content = cmd,
        executable = True,
    )

    files = [ctx.outputs.executable, ctx.file.license] + srcs + deps + ctx.files._classpath + inputs
    runfiles = ctx.runfiles(
        files = files,
        collect_data = True,
    )
    return struct(
        files = depset(files),
        runfiles = runfiles,
    )
checkstyle_test = rule(
    implementation = _checkstyle_test_impl,
    test = True,
    attrs = {
        "_classpath": attr.label_list(default = [
            Label("@com_puppycrawl_tools_checkstyle//jar"),
            Label("@commons_beanutils_commons_beanutils//jar"),
            Label("@commons_cli_commons_cli//jar"),
            Label("@commons_collections_commons_collections//jar"),
            Label("@org_slf4j_slf4j_api//jar"),
            Label("@org_slf4j_slf4j_jcl//jar"),
            Label("@antlr_antlr//jar"),
            Label("@org_antlr_antlr4_runtime//jar"),
            Label("@com_google_guava_guava23//jar"),
        ]),
        "config": attr.label(allow_single_file = True, default = "//config:checkstyle"),
        "suppressions": attr.label(allow_single_file = True, default = "//config:suppressions"),
        "license": attr.label(allow_single_file = True, default = "//config:license"),
        "properties": attr.label(allow_single_file = True),
        "opts": attr.string_list(),
        "string_opts": attr.string_dict(),
        "srcs": attr.label_list(allow_files = True),
        "deps": attr.label_list(),
    },
)
"""Run checkstyle
Args:
config: A checkstyle configuration file
suppressions: A checkstyle suppressions file
license: A license file that can be used with the checkstyle license
target
properties: A properties file to be used
opts: Options to be passed on the command line that have no
argument
string_opts: Options to be passed on the command line that have an
argument
srcs: The files to check
"""

Related

Bazel equivalent of Buck's classpath

I'm trying to migrate a project from Buck to Bazel and I'm looking for an equivalent of the $(classpath) macro available for genrules. Is there anything similar available in Bazel to get the list of jars on the classpath of a given java_library?
The best I could come up with is iterating over the list of dependencies and using the $(execpath) macro to get the corresponding jar:
jar_deps = []
for dep in deps:  # deps are the same dependencies specified for the java_library
    jar_deps.append("$(execpath %s)" % dep)

genrule(
    name = "test-rule",
    outs = ["test-rule.txt"],
    deps = deps,
    cmd = "echo \"%s\" > $@" % (":".join(jar_deps)),
)
Is there a better way?
It looks like another way to achieve this is by using a custom rule accessing the Java rule's JavaInfo provider:
def _runtime_deps_providing_rule_impl(ctx):
    return [
        platform_common.TemplateVariableInfo({
            "RUNTIME_DEPS": ":".join([f.path for f in ctx.attr.rule[JavaInfo].transitive_runtime_deps.to_list()]),
        }),
    ]

runtime_deps_providing_rule = rule(
    implementation = _runtime_deps_providing_rule_impl,
    attrs = {
        "rule": attr.label(),
    },
)

runtime_deps_providing_rule(
    name = "test-providing-rule",
    rule = ":test-java-rule",
)

genrule(
    name = "test-rule",
    outs = ["test-rule.txt"],
    cmd = "echo \"$(RUNTIME_DEPS)\" > $@",
    toolchains = [":test-providing-rule"],
)
The advantage of this is that there is no need to explicitly pass the list of dependencies around.

Java - how to create a file under unknown/variable path?

I have a directory: <dir>\Report\<env>\Log_XXX\Logs
where XXX is randomly generated at run time, and I have to create a file inside the Logs folder.
The following is what I tried in order to create the Logs folder:
new File(System.getProperty("user.dir") + "/Report/" + System.getProperty("env") + "/" + Pattern.compile("^Log_") + "/Logs").mkdirs();
Based on your comments, it appears you are trying to locate the one and only subdirectory whose base name starts with Log_. You can accomplish this with Files.list:
Path logParent = Paths.get(
    System.getProperty("user.dir"),
    "Report",
    System.getProperty("env"));

Path logDir;
try (Stream<Path> listing = Files.list(logParent)) {
    Optional<Path> match = listing.filter(p -> Files.isDirectory(p) &&
        p.getFileName().toString().startsWith("Log_")).findFirst();
    logDir = match.orElseThrow(() -> new RuntimeException(
        "No log directory found in " + logParent));
}
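Once logDir is resolved, creating the Logs subfolder and a file inside it is only a couple more calls; a minimal sketch (the file name is just an example):

// Create <dir>/Report/<env>/Log_XXX/Logs if it does not exist yet.
Path logsFolder = Files.createDirectories(logDir.resolve("Logs"));
// Create a file inside it; "output.log" is only an example name.
Path logFile = Files.createFile(logsFolder.resolve("output.log"));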

How can you get 1.7-compatible output for a java_proto_library?

For a java_library, I can set the javacopts attribute on the build rule. There doesn't appear to be anything similar for java_proto_library or java_lite_proto_library.
I can work around this by passing -source and -target options to javac via the --javacopt flag to Bazel, but I'd rather have it encoded in the BUILD files.
You could add a JDK7 toolchain and then build everything with it. E.g., add this to a BUILD file:
java_toolchain(
    name = "jdk7",
    bootclasspath = ["@bazel_tools//tools/jdk:bootclasspath"],
    encoding = "UTF-8",
    extclasspath = ["@bazel_tools//tools/jdk:extdir"],
    genclass = ["@bazel_tools//tools/jdk:GenClass_deploy.jar"],
    header_compiler = ["@bazel_tools//tools/jdk:turbine_deploy.jar"],
    ijar = ["@bazel_tools//tools/jdk:ijar"],
    javabuilder = ["@bazel_tools//tools/jdk:JavaBuilder_deploy.jar"],
    javac = ["@bazel_tools//third_party/java/jdk/langtools:javac_jar"],
    javac_supports_workers = 1,
    jvm_opts = [
        "-XX:+TieredCompilation",
        "-XX:TieredStopAtLevel=1",
    ],
    singlejar = ["@bazel_tools//tools/jdk:SingleJar_deploy.jar"],
    source_version = "7",
    target_version = "7",
    visibility = ["//visibility:public"],
)
And then build with:
bazel build --java_toolchain=//whatever:jdk7 //your:target
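If you would rather not pass the flag on every invocation, the same option can also be recorded in the workspace's .bazelrc (a sketch, reusing the toolchain label from the command above):

# .bazelrc -- applied to every build in this workspace
build --java_toolchain=//whatever:jdk7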

How to read a properties files and use the values in project Gradle script?

I am working on a Gradle script where I need to read the local.properties file and use its values in build.gradle. I am doing it in the manner shown below. When I run the script it does not throw an error, but it also does not do anything like creating, deleting, or copying files. I tried printing the value of the variable and it shows the correct value.
Can someone let me know if this is the correct way to do this? I think another way is to define everything in gradle.properties and use it in build.gradle. Can someone let me know how I can access the properties in build.gradle from build.properties?
build.gradle file:
apply plugin: 'java'

// Set the group for publishing
group = 'com.true.test'

/**
 * Initializing GAVC settings
 */
def buildProperties = new Properties()
file("version.properties").withInputStream {
    stream -> buildProperties.load(stream)
}

// If jenkins build, add the jenkins build version to the version. Else add snapshot version to the version.
def env = System.getenv()
if (env["BUILD_NUMBER"]) buildProperties.test += ".${env["BUILD_NUMBER"]}"
version = buildProperties.test
println "${version}"

// Name is set in the settings.gradle file
group = "com.true.test"
version = buildProperties.test
println "Building ${project.group}:${project.name}:${project.version}"

Properties properties = new Properties()
properties.load(project.file('build.properties').newDataInputStream())
def folderDir = properties.getProperty('build.dir')
def configDir = properties.getProperty('config.dir')
def baseDir = properties.getProperty('base.dir')
def logDir = properties.getProperty('log.dir')
def deployDir = properties.getProperty('deploy.dir')
def testsDir = properties.getProperty('tests.dir')
def packageDir = properties.getProperty('package.dir')
def wrapperDir = properties.getProperty('wrapper.dir')

sourceCompatibility = 1.7
compileJava.options.encoding = 'UTF-8'

repositories {
    maven { url "http://arti.oven.c:9000/release" }
}

task swipe(type: Delete) {
    println "Delete $projectDir/${folderDir}"
    delete "$projectDir/$folderDir"
    delete "$projectDir/$logDir"
    delete "$projectDir/$deployDir"
    delete "$projectDir/$packageDir"
    delete "$projectDir/$testsDir"
    mkdir("$projectDir/${folderDir}")
    mkdir("projectDir/${logDir}")
    mkdir("projectDir/${deployDir}")
    mkdir("projectDir/${packageDir}")
    mkdir("projectDir/${testsDir}")
}

task prepConfigs(type: Copy, overwrite: true, dependsOn: swipe) {
    println "The name of ${projectDir}/${folderDir} and ${projectDir}/${configDir}"
    from('${projectDir}/${folderDir}')
    into('${projectDir}/$configDir}')
    include('*.xml')
}
build.properties file:
# -----------------------------------------------------------------
# General Settings
# -----------------------------------------------------------------
application.name = Admin
project.name = Hello Cool
# -----------------------------------------------------------------
# ant build directories
# -----------------------------------------------------------------
sandbox.dir = ${projectDir}/../..
reno.root.dir=${sandbox.dir}/Reno
ant.dir = ${projectDir}/ant
build.dir = ${ant.dir}/build
log.dir = ${ant.dir}/logs
config.dir = ${ant.dir}/configs
deploy.dir = ${ant.dir}/deploy
static.dir = ${ant.dir}/static
package.dir = ${ant.dir}/package
tests.dir = ${ant.dir}/tests
tests.logs.dir = ${tests.dir}/logs
external.dir = ${sandbox.dir}/FlexCommon/External
external.lib.dir = ${external.dir}/libs
If using the default gradle.properties file, you can access the properties directly from within your build.gradle file:
gradle.properties:
applicationName=Admin
projectName=Hello Cool
build.gradle:
task printProps {
    doFirst {
        println applicationName
        println projectName
    }
}
If you need to access a custom file, or access properties which include . in them (as it appears you need to do), you can do the following in your build.gradle file:
def props = new Properties()
file("build.properties").withInputStream { props.load(it) }

task printProps {
    doFirst {
        println props.getProperty("application.name")
        println props.getProperty("project.name")
    }
}
Take a look at this section of the Gradle documentation for more information.
Edit
If you'd like to dynamically set up some of these properties (as mentioned in a comment below), you can create a properties.gradle file (the name isn't important) and require it in your build.gradle script.
properties.gradle:
ext {
    subPath = "some/sub/directory"
    fullPath = "$projectDir/$subPath"
}
build.gradle
apply from: 'properties.gradle'
// prints the full expanded path
println fullPath
We can use a separate file (config.groovy in my case) to abstract out all the configuration.
In this example, we're using three environments:
dev
test
prod
each of which has the properties serverName, serverPort and resources. Here we're expecting that the third property, resources, may be the same in multiple environments, so we've abstracted that logic out and overridden it in the specific environment wherever necessary:
config.groovy
resources {
    serverName = 'localhost'
    serverPort = '8090'
}
environments {
    dev {
        serverName = 'http://localhost'
        serverPort = '8080'
    }
    test {
        serverName = 'http://www.testserver.com'
        serverPort = '5211'
        resources {
            serverName = 'resources.testserver.com'
        }
    }
    prod {
        serverName = 'http://www.productionserver.com'
        serverPort = '80'
        resources {
            serverName = 'resources.productionserver.com'
            serverPort = '80'
        }
    }
}
Once the properties file is ready, we can use the following in build.gradle to load these settings:
build.gradle
loadProperties()

def loadProperties() {
    def environment = hasProperty('env') ? env : 'dev'
    println "Current Environment: " + environment
    def configFile = file('config.groovy')
    def config = new ConfigSlurper(environment).parse(configFile.toURL())
    project.ext.config = config
}

task printProperties {
    println "serverName: $config.serverName"
    println "serverPort: $config.serverPort"
    println "resources.serverName: $config.resources.serverName"
    println "resources.serverPort: $config.resources.serverPort"
}
Let's run this with different sets of inputs:
gradle -q printProperties
Current Environment: dev
serverName: http://localhost
serverPort: 8080
resources.serverName: localhost
resources.serverPort: 8090
gradle -q -Penv=dev printProperties
Current Environment: dev
serverName: http://localhost
serverPort: 8080
resources.serverName: localhost
resources.serverPort: 8090
gradle -q -Penv=test printProperties
Current Environment: test
serverName: http://www.testserver.com
serverPort: 5211
resources.serverName: resources.testserver.com
resources.serverPort: 8090
gradle -q -Penv=prod printProperties
Current Environment: prod
serverName: http://www.productionserver.com
serverPort: 80
resources.serverName: resources.productionserver.com
resources.serverPort: 80
Another way... in build.gradle:
Add:
classpath 'org.flywaydb:flyway-gradle-plugin:3.1'
And this:
def props = new Properties()
file("src/main/resources/application.properties").withInputStream { props.load(it) }

apply plugin: 'flyway'

flyway {
    url = props.getProperty("spring.datasource.url")
    user = props.getProperty("spring.datasource.username")
    password = props.getProperty("spring.datasource.password")
    schemas = ['db_example']
}
This is for Kotlin DSL (build.gradle.kts):
import java.util.*
// ...
val properties = Properties().apply {
    load(rootProject.file("my-local.properties").reader())
}
val prop = properties["myPropName"]
In Android projects (when applying the android plugin) you can also do this:
import com.android.build.gradle.internal.cxx.configure.gradleLocalProperties
// ...
val properties = gradleLocalProperties(rootDir)
val prop = properties["propName"]
Just had this issue come up today. We found the following worked both locally and in our pipeline:
In build.gradle:
try {
    apply from: 'path/name_of_external_props_file.properties'
} catch (Exception e) {}
This way, when the external props file (which, as in our case, shouldn't get committed to Git or whatever you are using) is not found in the pipeline, the 'apply from:' won't throw an error. In our use case we have a file with a user ID and password that should not get committed to Git. Aside from the problem of file reading, we found that the variables we had declared in the external file, maven_user and maven_pass, also had to be declared in gradle.properties. That is, they simply needed to be mentioned, as in:
projectName=Some_project_name
version=1.x.y
maven_user=
maven_pass=
We also found that in the external file we had to put single quotes around these values, or Gradle got confused. So the external file looked like this:
maven_user='abc123'
maven_pass='fghifh7435bvibry9y99ghhrhg9539y5398'
instead of this:
maven_user=abc123
maven_pass=fghifh7435bvibry9y99ghhrhg9539y5398
That's all we had to do and we were fine. I hope this may help others.
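For reference, once maven_user and maven_pass are visible to the build they can be used like any other Gradle property, for example as repository credentials (a sketch; the repository URL is made up):

repositories {
    maven {
        url "https://repo.example.com/releases"   // hypothetical internal repository
        credentials {
            username maven_user
            password maven_pass
        }
    }
}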

How to link classes from JDK into scaladoc-generated doc?

I'm trying to link classes from the JDK into the scaladoc-generated doc.
I've used the -doc-external-doc option of scaladoc 2.10.1 but without success.
I'm using -doc-external-doc:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar#http://docs.oracle.com/javase/7/docs/api/, but I get links such as index.html#java.io.File instead of index.html?java/io/File.html.
It seems this option only works for scaladoc-generated doc.
Did I miss an option in scaladoc, or should I file a feature request?
I've configured sbt as follows:
scalacOptions in (Compile,doc) += "-doc-external-doc:/usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar#http://docs.oracle.com/javase/7/docs/api"
Note: I've seen the Opts.doc.externalAPI util in the upcoming sbt 0.13. I think a nice addition (not sure if it's possible) would be to pass a ModuleID instead of a File. The util would figure out which file corresponds to the ModuleID.
I use sbt 0.13.5.
There's no out-of-the-box way to get Javadoc links inside scaladoc, and as far as I understand it's not sbt's fault but the way scaladoc works. As Josh pointed out in his comment, you should report it to scaladoc.
There is, however, a workaround I came up with: post-process the generated scaladoc so the Java URLs are replaced to form proper Javadoc links.
The file scaladoc.sbt should be placed inside an sbt project, and whenever the doc task gets executed, the post-processing via the fixJavaLinksTask task kicks in.
NOTE: There are lots of hardcoded paths, so use it with caution (i.e. do the polishing however you see fit).
import scala.util.matching.Regex.Match

autoAPIMappings := true

// builds -doc-external-doc
apiMappings += (
  file("/Library/Java/JavaVirtualMachines/jdk1.8.0_11.jdk/Contents/Home/jre/lib/rt.jar") ->
    url("http://docs.oracle.com/javase/8/docs/api")
)

lazy val fixJavaLinksTask = taskKey[Unit](
  "Fix Java links - replace #java.io.File with ?java/io/File.html"
)

fixJavaLinksTask := {
  println("Fixing Java links")
  val t = (target in (Compile, doc)).value
  (t ** "*.html").get.filter(hasJavadocApiLink).foreach { f =>
    println("fixing " + f)
    val newContent = javadocApiLink.replaceAllIn(IO.read(f), fixJavaLinks)
    IO.write(f, newContent)
  }
}

val fixJavaLinks: Match => String = m =>
  m.group(1) + "?" + m.group(2).replace(".", "/") + ".html"

val javadocApiLink = """\"(http://docs\.oracle\.com/javase/8/docs/api/index\.html)#([^"]*)\"""".r

def hasJavadocApiLink(f: File): Boolean = (javadocApiLink findFirstIn IO.read(f)).nonEmpty

fixJavaLinksTask <<= fixJavaLinksTask triggeredBy (doc in Compile)
I took the answer by @jacek-laskowski and modified it so that it avoids hard-coded strings and can be used for any number of Java libraries, not just the standard one.
Edit: the location of rt.jar is now determined at runtime using sun.boot.class.path and does not have to be hard-coded.
The only thing you need to modify is the map, which I have called externalJavadocMap in the following:
import scala.util.matching.Regex
import scala.util.matching.Regex.Match

val externalJavadocMap = Map(
  "owlapi" -> "http://owlcs.github.io/owlapi/apidocs_4_0_2/index.html"
)

/*
 * The rt.jar file is located in the path stored in the sun.boot.class.path system property.
 * See the Oracle documentation at http://docs.oracle.com/javase/6/docs/technotes/tools/findingclasses.html.
 */
val rtJar: String = System.getProperty("sun.boot.class.path").split(java.io.File.pathSeparator).collectFirst {
  case str: String if str.endsWith(java.io.File.separator + "rt.jar") => str
}.get // fail hard if not found

val javaApiUrl: String = "http://docs.oracle.com/javase/8/docs/api/index.html"

val allExternalJavadocLinks: Seq[String] = javaApiUrl +: externalJavadocMap.values.toSeq

def javadocLinkRegex(javadocURL: String): Regex = ("""\"(\Q""" + javadocURL + """\E)#([^"]*)\"""").r

def hasJavadocLink(f: File): Boolean = allExternalJavadocLinks exists {
  javadocURL: String =>
    (javadocLinkRegex(javadocURL) findFirstIn IO.read(f)).nonEmpty
}

val fixJavaLinks: Match => String = m =>
  m.group(1) + "?" + m.group(2).replace(".", "/") + ".html"

/* You can print the classpath with `show compile:fullClasspath` in the SBT REPL.
 * From that list you can find the name of the jar for the managed dependency.
 */
lazy val documentationSettings = Seq(
  apiMappings ++= {
    // Lookup the path to jar from the classpath
    val classpath = (fullClasspath in Compile).value
    def findJar(nameBeginsWith: String): File = {
      classpath.find { attributed: Attributed[File] => (attributed.data ** s"$nameBeginsWith*.jar").get.nonEmpty }.get.data // fail hard if not found
    }
    // Define external documentation paths
    (externalJavadocMap map {
      case (name, javadocURL) => findJar(name) -> url(javadocURL)
    }) + (file(rtJar) -> url(javaApiUrl))
  },
  // Override the task to fix the links to JavaDoc
  doc in Compile <<= (doc in Compile) map {
    target: File =>
      (target ** "*.html").get.filter(hasJavadocLink).foreach { f =>
        //println(s"Fixing $f.")
        val newContent: String = allExternalJavadocLinks.foldLeft(IO.read(f)) {
          case (oldContent: String, javadocURL: String) =>
            javadocLinkRegex(javadocURL).replaceAllIn(oldContent, fixJavaLinks)
        }
        IO.write(f, newContent)
      }
      target
  }
)
I am using SBT 0.13.8.
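To actually activate these, the settings sequence just needs to be added to the project definition in build.sbt, e.g. (the project name is illustrative):

// build.sbt (sbt 0.13.x style)
lazy val root = (project in file("."))
  .settings(documentationSettings: _*)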
