initial commit for a new layout.

git-svn-id: https://hudson.dev.java.net/svn/hudson/trunk/hudson/main@969 71c3de6d-444a-0410-be80-ed276b4c234a
kohsuke 2006-11-05 21:16:01 +00:00
commit 8a0dc230f4
594 changed files with 48337 additions and 0 deletions

core/bin/hudson

@ -0,0 +1,32 @@
#!/bin/sh
if [ "$HUDSON_HOME" = "" ]; then
echo HUDSON_HOME is not set
exit 1
fi
# search the installation directory
PRG=$0
progname=`basename $0`
saveddir=`pwd`
cd "`dirname $PRG`"
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '.*/.*' > /dev/null; then
PRG="$link"
else
PRG="`dirname $PRG`/$link"
fi
done
BINDIR=`dirname "$PRG"`/..
# make it fully qualified
cd "$saveddir"
BINDIR="`cd "$BINDIR" && pwd`"
exec java -jar "$BINDIR/lib/hudson.jar" "$@"

core/bin/slave

@ -0,0 +1,51 @@
#!/bin/sh
# Usage: slave <workdir> <envvar1> <value1> <envvar2> <value2> ... -- <command> <arg> ...
#
# This wrapper is used to launch a process remotely with some environment variables
# set.
# if there are any environment entries, read them
if [ -f $HOME/.hudson_slave_profile ];
then
. $HOME/.hudson_slave_profile
fi
# set the current directory
cd "$1"
shift
# fill in environment variables
while [ $# -gt 1 -a "$1" != "--" ];
do
eval $1="$2"
export $1
shift 2
done
if [ "$1" != "--" ];
then
echo Error: no command given
exit -1
fi
shift
# execute. use eval so that variables can be expanded now
# this allows users to specify $HOME or $DISPLAY or anything else,
# and it works as expected.
#
# but since eval messes up whitespace ('eval ls "a b"' means the same as 'eval ls a b')
# we need to re-escape arguments (we need to say 'eval ls \"a b\"' to achieve the desired effect)
list=""
for a in "$@"
do
list="$list \"$a\""
done
eval "$list"
ret=$?
# these additional hooks seem to prevent "select: bad file number" error
# on some systems, so use this as a precaution. We can afford to waste
# one second, can't we?
sleep 1
echo
exit $ret


@ -0,0 +1,88 @@
header {
package hudson.scheduler;
}
class CrontabParser extends Parser("BaseParser");
options {
defaultErrorHandler=false;
}
startRule [CronTab table]
throws ANTLRException
{
long m,h,d,mnth,dow;
}
: m=expr[0] WS h=expr[1] WS d=expr[2] WS mnth=expr[3] WS dow=expr[4] EOF
{
table.bits[0]=m;
table.bits[1]=h;
table.bits[2]=d;
table.bits[3]=mnth;
table.dayOfWeek=(int)dow;
}
;
expr [int field]
returns [long bits=0]
throws ANTLRException
{
long lhs,rhs=0;
}
: lhs=term[field] ("," rhs=expr[field])?
{
bits = lhs|rhs;
}
;
term [int field]
returns [long bits=0]
throws ANTLRException
{
int d=1,s,e,t;
}
: (token "-")=> s=token "-" e=token ( "/" d=token )?
{
bits = doRange(s,e,d,field);
}
| t=token
{
rangeCheck(t,field);
bits = 1L<<t;
}
| "*" ("/" d=token )?
{
bits = doRange(d,field);
}
;
token
returns [int value=0]
: t:TOKEN
{
value = Integer.parseInt(t.getText());
}
;
class CrontabLexer extends Lexer;
options {
defaultErrorHandler=false;
}
TOKEN
options {
paraphrase="a number";
}
: ('0'..'9')+
;
WS
options {
paraphrase="space";
}
: (' '|'\t')+
;
MINUS: '-';
STAR: '*';
DIV: '/';
OR: ',';
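
The grammar above packs each cron field into a bit mask (bit n set means value n is allowed). Below is a minimal sketch, not part of this commit, of how a caller might test those masks against a calendar; it assumes CronTab exposes the bits[] array and dayOfWeek mask exactly as startRule populates them, and that cron conventions (months 1-12, day-of-week bit 0 = Sunday) apply.

import java.util.Calendar;

class CronMatchSketch {
    // bits[0]=minutes, bits[1]=hours, bits[2]=day of month, bits[3]=month (1-12);
    // dayOfWeek is itself a bit mask with bit 0 = Sunday.
    static boolean matches(long[] bits, int dayOfWeek, Calendar cal) {
        return (bits[0] & (1L << cal.get(Calendar.MINUTE))) != 0
            && (bits[1] & (1L << cal.get(Calendar.HOUR_OF_DAY))) != 0
            && (bits[2] & (1L << cal.get(Calendar.DAY_OF_MONTH))) != 0
            && (bits[3] & (1L << (cal.get(Calendar.MONTH) + 1))) != 0
            && (dayOfWeek & (1 << (cal.get(Calendar.DAY_OF_WEEK) - 1))) != 0;
    }
}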


@ -0,0 +1,16 @@
package hudson;
import java.io.FilterOutputStream;
import java.io.OutputStream;
/**
* @author Kohsuke Kawaguchi
*/
public class CloseProofOutputStream extends FilterOutputStream {
public CloseProofOutputStream(OutputStream out) {
super(out);
}
public void close() {
}
}
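
A quick usage sketch (illustrative only, not part of this commit): wrapping a shared stream such as System.out so that code which insists on calling close() cannot actually close the underlying stream.

import hudson.CloseProofOutputStream;
import java.io.PrintStream;

class CloseProofExample {
    public static void main(String[] args) {
        PrintStream ps = new PrintStream(new CloseProofOutputStream(System.out));
        ps.println("hello");
        ps.close();                                // closes only the wrapper; close() above is a no-op
        System.out.println("System.out is still usable");
    }
}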


@ -0,0 +1,29 @@
package hudson;
import org.apache.tools.ant.taskdefs.Execute;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Vector;
/**
* @author Kohsuke Kawaguchi
*/
public class EnvVars {
/**
* Environmental variables that we've inherited.
*/
public static final Map<String,String> masterEnvVars;
static {
Vector<String> envs = Execute.getProcEnvironment();
Map<String,String> m = new HashMap<String,String>();
for (String e : envs) {
int idx = e.indexOf('=');
m.put(e.substring(0, idx), e.substring(idx + 1));
}
masterEnvVars = Collections.unmodifiableMap(m);
}
}


@ -0,0 +1,16 @@
package hudson;
/**
* Marker interface that designates extensible components
* in Hudson that can be implemented by {@link Plugin}s.
*
* <p>
* Interfaces/classes that implement this interface can be extended by plugins.
* See respective interfaces/classes for more about how to register custom
* implementations to Hudson.
*
* @author Kohsuke Kawaguchi
* @see Plugin
*/
public interface ExtensionPoint {
}


@ -0,0 +1,15 @@
package hudson;
import java.util.Calendar;
/**
* Provides a RSS feed view of the data.
*
* @author Kohsuke Kawaguchi
*/
public interface FeedAdapter<E> {
String getEntryTitle(E entry);
String getEntryUrl(E entry);
String getEntryID(E entry);
Calendar getEntryTimestamp(E entry);
}
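
A minimal implementation sketch (the LogEntry type is hypothetical and not part of this commit), showing how a data source would adapt its entries to the feed view:

import hudson.FeedAdapter;
import java.util.Calendar;

class LogEntry {                     // hypothetical payload type
    String title, url, id;
    Calendar when;
}

class LogFeedAdapter implements FeedAdapter<LogEntry> {
    public String getEntryTitle(LogEntry e)       { return e.title; }
    public String getEntryUrl(LogEntry e)         { return e.url; }
    public String getEntryID(LogEntry e)          { return e.id; }
    public Calendar getEntryTimestamp(LogEntry e) { return e.when; }
}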


@ -0,0 +1,143 @@
package hudson;
import java.io.File;
import java.io.IOException;
/**
* {@link File} like path-manipulation object.
*
* <p>
* In general, because programs could be executed remotely,
* we need two path strings to identify the same directory.
* One from a point of view of the master (local), the other
* from a point of view of the slave (remote).
*
* <p>
* This class allows path manipulation to be done
* and allow the local/remote versions to be obtained
* after the computation.
*
* @author Kohsuke Kawaguchi
*/
public final class FilePath {
private final File local;
private final String remote;
public FilePath(File local, String remote) {
this.local = local;
this.remote = remote;
}
/**
* Useful when there's no remote path.
*/
public FilePath(File local) {
this(local,local.getPath());
}
public FilePath(FilePath base, String rel) {
this.local = new File(base.local,rel);
if(base.isUnix()) {
this.remote = base.remote+'/'+rel;
} else {
this.remote = base.remote+'\\'+rel;
}
}
/**
* Checks if the remote path is Unix.
*/
private boolean isUnix() {
// Windows can handle '/' as a path separator, but Unix can't handle '\\',
// so err on the Unix side
return remote.indexOf("\\")==-1;
}
public File getLocal() {
return local;
}
public String getRemote() {
return remote;
}
/**
* Creates this directory.
*/
public void mkdirs() throws IOException {
if(!local.mkdirs() && !local.exists())
throw new IOException("Failed to mkdirs: "+local);
}
/**
* Deletes all the contents of this directory, but not the directory itself
*/
public void deleteContents() throws IOException {
// TODO: consider doing this remotely if possible
Util.deleteContentsRecursive(getLocal());
}
/**
* Gets just the file name portion.
*
* This method assumes that the file name is the same between local and remote.
*/
public String getName() {
return local.getName();
}
/**
* The same as {@code new FilePath(this,rel)} but more OO.
*/
public FilePath child(String rel) {
return new FilePath(this,rel);
}
/**
* Gets the parent file.
*/
public FilePath getParent() {
int len = remote.length()-1;
while(len>=0) {
char ch = remote.charAt(len);
if(ch=='\\' || ch=='/')
break;
len--;
}
return new FilePath( local.getParentFile(), remote.substring(0,len) );
}
/**
* Creates a temporary file.
*/
public FilePath createTempFile(String prefix, String suffix) throws IOException {
File f = File.createTempFile(prefix, suffix, getLocal());
return new FilePath(this,f.getName());
}
/**
* Deletes this file.
*/
public boolean delete() {
return local.delete();
}
public boolean exists() {
return local.exists();
}
/**
* Always use {@link #getLocal()} or {@link #getRemote()}
*/
@Deprecated
public String toString() {
// to make writing JSPs easy, return local
return local.toString();
}
/**
* {@link FilePath} constant that can be used if the directory is not important.
*/
public static final FilePath RANDOM = new FilePath(new File("."));
}
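
An illustrative sketch (paths are hypothetical) of the dual local/remote bookkeeping described in the class javadoc: the same workspace as seen by the master and by a Windows slave.

import hudson.FilePath;
import java.io.File;

class FilePathExample {
    public static void main(String[] args) {
        FilePath ws  = new FilePath(new File("/var/hudson/ws/foo"), "C:\\hudson\\ws\\foo");
        FilePath log = ws.child("build.log");
        System.out.println(log.getLocal());   // /var/hudson/ws/foo/build.log
        System.out.println(log.getRemote());  // C:\hudson\ws\foo\build.log
    }
}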


@ -0,0 +1,187 @@
package hudson;
import hudson.model.ModelObject;
import hudson.model.Node;
import hudson.model.Project;
import hudson.model.Run;
import hudson.model.Hudson;
import org.kohsuke.stapler.Ancestor;
import org.kohsuke.stapler.StaplerRequest;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.Calendar;
import java.util.SortedMap;
import java.util.logging.LogRecord;
import java.util.logging.SimpleFormatter;
import java.io.File;
/**
* @author Kohsuke Kawaguchi
*/
public class Functions {
public static boolean isModel(Object o) {
return o instanceof ModelObject;
}
public static String xsDate(Calendar cal) {
return Util.XS_DATETIME_FORMATTER.format(cal.getTime());
}
public static String getDiffString(int i) {
if(i==0) return "\u00B10"; // +/-0
String s = Integer.toString(i);
if(i>0) return "+"+s;
else return s;
}
/**
* Like {@link #getDiffString(int)}, but doesn't show anything for +/-0.
*/
public static String getDiffString2(int i) {
if(i==0) return "";
String s = Integer.toString(i);
if(i>0) return "+"+s;
else return s;
}
/**
* Adds the proper suffix.
*/
public static String addSuffix(int n, String singular, String plural) {
StringBuffer buf = new StringBuffer();
buf.append(n).append(' ');
if(n==1)
buf.append(singular);
else
buf.append(plural);
return buf.toString();
}
public static RunUrl decompose(StaplerRequest req) {
List<Ancestor> ancestors = (List<Ancestor>) req.getAncestors();
for (Ancestor anc : ancestors) {
if(anc.getObject() instanceof Run) {
// bingo
String ancUrl = anc.getUrl();
String reqUri = req.getOriginalRequestURI();
return new RunUrl(
(Run) anc.getObject(), ancUrl,
reqUri.substring(ancUrl.length()),
req.getContextPath() );
}
}
return null;
}
public static final class RunUrl {
private final String contextPath;
private final String basePortion;
private final String rest;
private final Run run;
public RunUrl(Run run, String basePortion, String rest, String contextPath) {
this.run = run;
this.basePortion = basePortion;
this.rest = rest;
this.contextPath = contextPath;
}
public String getBaseUrl() {
return basePortion;
}
/**
* Returns the same page in the next build.
*/
public String getNextBuildUrl() {
return getUrl(run.getNextBuild());
}
/**
* Returns the same page in the previous build.
*/
public String getPreviousBuildUrl() {
return getUrl(run.getPreviousBuild());
}
private String getUrl(Run n) {
if(n ==null)
return null;
else {
String url = contextPath + '/' + n.getUrl();
assert url.endsWith("/");
url = url.substring(0,url.length()-1); // cut off the trailing '/'
return url+rest;
}
}
}
public static Node.Mode[] getNodeModes() {
return Node.Mode.values();
}
public static String getProjectListString(List<Project> projects) {
return Project.toNameList(projects);
}
public static Object ifThenElse(boolean cond, Object thenValue, Object elseValue) {
return cond ? thenValue : elseValue;
}
public static String appendIfNotNull(String text, String suffix, String nullText) {
return text == null ? nullText : text + suffix;
}
public static Map getSystemProperties() {
return new TreeMap(System.getProperties());
}
public static Map getEnvVars() {
return new TreeMap(EnvVars.masterEnvVars);
}
public static boolean isWindows() {
return File.pathSeparatorChar==';';
}
public static List<LogRecord> getLogRecords() {
return Hudson.logRecords;
}
public static String printLogRecord(LogRecord r) {
return formatter.format(r);
}
public static Cookie getCookie(HttpServletRequest req,String name) {
Cookie[] cookies = req.getCookies();
if(cookies!=null) {
for (Cookie cookie : cookies) {
if(cookie.getName().equals(name)) {
return cookie;
}
}
}
return null;
}
/**
* Creates a sub map by using the given range (both ends inclusive).
*/
public static <V> SortedMap<Integer,V> filter(SortedMap<Integer,V> map, String from, String to) {
if(from==null && to==null) return map;
if(to==null)
return map.headMap(Integer.parseInt(from)-1);
if(from==null)
return map.tailMap(Integer.parseInt(to));
return map.subMap(Integer.parseInt(to),Integer.parseInt(from)-1);
}
private static final SimpleFormatter formatter = new SimpleFormatter();
}


@ -0,0 +1,102 @@
package hudson;
import hudson.model.TaskListener;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
* Starts a process.
*
* <p>
* This hides the difference between running programs locally vs remotely.
*
*
* <h2>'env' parameter</h2>
* <p>
* To allow important environment variables to be copied over to the remote machine,
* the 'env' parameter shouldn't contain default inherited environment variables
* (which often contain machine-specific information, like PATH, TIMEZONE, etc.)
*
* <p>
* {@link Launcher} is responsible for inheriting environment variables.
*
*
* @author Kohsuke Kawaguchi
*/
public class Launcher {
protected final TaskListener listener;
public Launcher(TaskListener listener) {
this.listener = listener;
}
public final Proc launch(String cmd, Map env, OutputStream out, FilePath workDir) throws IOException {
return launch(cmd,Util.mapToEnv(env),out,workDir);
}
public final Proc launch(String[] cmd, Map env, OutputStream out, FilePath workDir) throws IOException {
return launch(cmd,Util.mapToEnv(env),out,workDir);
}
public final Proc launch(String[] cmd,Map env, InputStream in, OutputStream out) throws IOException {
return launch(cmd,Util.mapToEnv(env),in,out);
}
public final Proc launch(String cmd,String[] env,OutputStream out, FilePath workDir) throws IOException {
return launch(Util.tokenize(cmd),env,out,workDir);
}
public Proc launch(String[] cmd,String[] env,OutputStream out, FilePath workDir) throws IOException {
printCommandLine(cmd, workDir);
return new Proc(cmd,Util.mapToEnv(inherit(env)),out,workDir.getLocal());
}
public Proc launch(String[] cmd,String[] env,InputStream in,OutputStream out) throws IOException {
printCommandLine(cmd, null);
return new Proc(cmd,inherit(env),in,out);
}
/**
* Returns true if this {@link Launcher} is going to launch on Unix.
*/
public boolean isUnix() {
return File.pathSeparatorChar==':';
}
/**
* Expands the list of environment variables by inheriting current env variables.
*/
private Map<String,String> inherit(String[] env) {
Map<String,String> m = new HashMap<String,String>(EnvVars.masterEnvVars);
for (String e : env) {
int index = e.indexOf('=');
String key = e.substring(0,index);
String value = e.substring(index+1);
if(value.length()==0)
m.remove(key);
else
m.put(key,value);
}
return m;
}
private void printCommandLine(String[] cmd, FilePath workDir) {
StringBuffer buf = new StringBuffer();
if (workDir != null) {
buf.append('[');
buf.append(workDir.getRemote().replaceFirst("^.+[/\\\\]", ""));
buf.append("] ");
}
buf.append('$');
for (String c : cmd) {
buf.append(' ').append(c);
}
listener.getLogger().println(buf.toString());
}
}
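
A short usage sketch (the command, variable, and workspace are hypothetical): only the extra variables are passed in, and the Launcher layers them over the inherited master environment as described in the javadoc above.

import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import java.io.IOException;

class LaunchSketch {
    static int runStep(Launcher launcher, FilePath workDir) throws IOException {
        String[] cmd = {"ant", "clean", "compile"};
        String[] env = {"BUILD_TAG=demo"};   // machine-specific variables like PATH are inherited
        Proc proc = launcher.launch(cmd, env, System.out, workDir);
        return proc.join();                  // block until the process exits
    }
}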


@ -0,0 +1,138 @@
package hudson;
import hudson.model.ExternalJob;
import hudson.model.ExternalRun;
import hudson.model.Hudson;
import hudson.model.Job;
import hudson.model.Result;
import hudson.util.DualOutputStream;
import hudson.util.EncodingStream;
import java.io.File;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.List;
/**
* Entry point to Hudson from command line.
*
* @author Kohsuke Kawaguchi
*/
public class Main {
public static void main(String[] args) {
try {
System.exit(run(args));
} catch (Exception e) {
e.printStackTrace();
System.exit(-1);
}
}
public static int run(String[] args) throws Exception {
String home = getHudsonHome();
if(home==null) {
System.err.println("HUDSON_HOME is not set.");
return -1;
}
if(home.startsWith("http")) {
return remotePost(args);
} else {
return localPost(args);
}
}
private static String getHudsonHome() {
return EnvVars.masterEnvVars.get("HUDSON_HOME");
}
/**
* Run command and place the result directly into the local installation of Hudson.
*/
public static int localPost(String[] args) throws Exception {
Hudson app = new Hudson(new File(getHudsonHome()),null);
Job job = app.getJob(args[0]);
if(!(job instanceof ExternalJob)) {
System.err.println(args[0]+" is not a valid external job name in Hudson");
return -1;
}
ExternalJob ejob = (ExternalJob) job;
ExternalRun run = ejob.newBuild();
// run the command
List<String> cmd = new ArrayList<String>();
for( int i=1; i<args.length; i++ )
cmd.add(args[i]);
run.run(cmd.toArray(new String[cmd.size()]));
return run.getResult()==Result.SUCCESS?0:1;
}
/**
* Run command and place the result into a remote Hudson installation
*/
public static int remotePost(String[] args) throws Exception {
String projectName = args[0];
String home = getHudsonHome();
if(!home.endsWith("/")) home = home + '/'; // make sure it ends with '/'
{// check if the home is set correctly
HttpURLConnection con = (HttpURLConnection)new URL(home).openConnection();
con.connect();
if(con.getResponseCode()!=200
|| con.getHeaderField("X-Hudson")==null) {
System.err.println(home+" is not Hudson ("+con.getResponseMessage()+")");
return -1;
}
}
String projectNameEnc = URLEncoder.encode(projectName,"UTF-8").replaceAll("\\+","%20");
{// check if the job name is correct
HttpURLConnection con = (HttpURLConnection)new URL(home+"job/"+projectNameEnc+"/acceptBuildResult").openConnection();
con.connect();
if(con.getResponseCode()!=200) {
System.err.println(projectName+" is not a valid job name on "+home+" ("+con.getResponseMessage()+")");
return -1;
}
}
// start a remote connection
HttpURLConnection con = (HttpURLConnection) new URL(home+"job/"+projectNameEnc+"/postBuildResult").openConnection();
con.setDoOutput(true);
con.connect();
OutputStream os = con.getOutputStream();
Writer w = new OutputStreamWriter(os,"UTF-8");
w.write("<?xml version='1.0' encoding='UTF-8'?>");
w.write("<run><log encoding='hexBinary'>");
w.flush();
// run the command
long start = System.currentTimeMillis();
List<String> cmd = new ArrayList<String>();
for( int i=1; i<args.length; i++ )
cmd.add(args[i]);
Proc proc = new Proc(cmd.toArray(new String[0]),(String[])null,System.in,
new DualOutputStream(System.out,new EncodingStream(os)));
int ret = proc.join();
w.write("</log><result>"+ret+"</result><duration>"+(System.currentTimeMillis()-start)+"</duration></run>");
w.close();
if(con.getResponseCode()!=200) {
Util.copyStream(con.getErrorStream(),System.err);
}
return ret;
}
}


@ -0,0 +1,105 @@
package hudson;
import hudson.model.Hudson;
import hudson.scm.SCM;
import hudson.tasks.Builder;
import hudson.tasks.Publisher;
import hudson.triggers.Trigger;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.net.URL;
/**
* Base class of Hudson plugin.
*
* <p>
* A plugin needs to derive from this class.
*
* @author Kohsuke Kawaguchi
* @since 1.42
*/
public abstract class Plugin {
/**
* Set by the {@link PluginManager}.
*/
/*package*/ PluginWrapper wrapper;
/**
* Called when a plugin is loaded to make the {@link ServletContext} object available to a plugin.
* This object allows plugins to talk to the surrounding environment.
*
* <p>
* The default implementation is no-op.
*
* @param context
* Always non-null.
*
* @since 1.42
*/
public void setServletContext(ServletContext context) {
}
/**
* Called to allow plugins to initialize themselves.
*
* <p>
* This method is called after {@link #setServletContext(ServletContext)} is invoked.
* You can also use {@link Hudson#getInstance()} to access the singleton hudson instance.
*
* <p>
* Plugins should override this method and register custom
* {@link Publisher}, {@link Builder}, {@link SCM}, and {@link Trigger}s to the corresponding list.
* See {@link ExtensionPoint} for the complete list of extension points in Hudson.
*
*
* @throws Exception
* any exception thrown by the plugin during initialization will disable the plugin.
*
* @since 1.42
* @see ExtensionPoint
*/
public void start() throws Exception {
}
/**
* Called to orderly shut down Hudson.
*
* <p>
* This is a good opportunity to clean up resources that the plugin started.
* This method will not be invoked if the {@link #start()} failed abnormally.
*
* @throws Exception
* if any exception is thrown, it is simply recorded and the shut-down of other
* plugins continues. This is primarily just a convenience feature, so that
* each plugin author doesn't have to worry about catching an exception and
* recording it.
*
* @since 1.42
*/
public void stop() throws Exception {
}
/**
* This method serves static resources in the plugin under <tt>hudson/plugin/SHORTNAME</tt>.
*/
public void doDynamic(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
String path = req.getRestOfPath();
if(path.length()==0)
path = "/";
if(path.indexOf("..")!=-1 || path.length()<1) {
// don't serve anything other than files in the sub directory.
rsp.sendError(HttpServletResponse.SC_BAD_REQUEST);
return;
}
rsp.serveFile(req, new URL(wrapper.baseResourceURL,'.'+path));
}
}
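
A minimal plugin sketch (hypothetical class name, not part of this commit): the class named by the plugin manifest's Plugin-Class entry extends Plugin and hooks the lifecycle described above.

import hudson.Plugin;

public class HelloWorldPlugin extends Plugin {
    @Override
    public void start() throws Exception {
        // register custom Publisher/Builder/SCM/Trigger implementations here
    }

    @Override
    public void stop() throws Exception {
        // release anything acquired in start()
    }
}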


@ -0,0 +1,146 @@
package hudson;
import hudson.model.Hudson;
import hudson.util.Service;
import java.util.logging.Level;
import javax.servlet.ServletContext;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.logging.Logger;
/**
* Manages {@link PluginWrapper}s.
*
* @author Kohsuke Kawaguchi
*/
public final class PluginManager {
/**
* All discovered plugins.
*/
private final List<PluginWrapper> plugins = new ArrayList<PluginWrapper>();
/**
* All active plugins.
*/
private final List<PluginWrapper> activePlugins = new ArrayList<PluginWrapper>();
/**
* Plug-in root directory.
*/
public final File rootDir;
public final ServletContext context;
/**
* {@link ClassLoader} that can load all the publicly visible classes from plugins
* (including the classloader that loads Hudson itself.)
*
*/
// implementation is minimal --- just enough to run XStream
// and load plugin-contributed classes.
public final ClassLoader uberClassLoader = new UberClassLoader();
public PluginManager(ServletContext context) {
this.context = context;
rootDir = new File(Hudson.getInstance().getRootDir(),"plugins");
if(!rootDir.exists())
rootDir.mkdirs();
File[] archives = rootDir.listFiles(new FilenameFilter() {
public boolean accept(File dir, String name) {
return name.endsWith(".hpi") // plugin jar file
|| name.endsWith(".hpl"); // linked plugin. for debugging.
}
});
if(archives==null) {
LOGGER.severe("Hudson is unable to create "+rootDir+"\nPerhaps its security privilege is insufficient");
return;
}
for( File arc : archives ) {
try {
PluginWrapper p = new PluginWrapper(this, arc);
plugins.add(p);
if(p.isActive())
activePlugins.add(p);
} catch (IOException e) {
LOGGER.log(Level.SEVERE, "Failed to load a plug-in " + arc, e);
}
}
}
public List<PluginWrapper> getPlugins() {
return plugins;
}
public PluginWrapper getPlugin(String shortName) {
for (PluginWrapper p : plugins) {
if(p.getShortName().equals(shortName))
return p;
}
return null;
}
/**
* Discover all the service provider implementations of the given class,
* via <tt>META-INF/services</tt>.
*/
public <T> Collection<Class<? extends T>> discover( Class<T> spi ) {
Set<Class<? extends T>> result = new HashSet<Class<? extends T>>();
for (PluginWrapper p : activePlugins) {
Service.load(spi, p.classLoader, result);
}
return result;
}
/**
* Orderly terminates all the plugins.
*/
public void stop() {
for (PluginWrapper p : activePlugins) {
p.stop();
}
}
private final class UberClassLoader extends ClassLoader {
public UberClassLoader() {
super(PluginManager.class.getClassLoader());
}
@Override
protected Class<?> findClass(String name) throws ClassNotFoundException {
for (PluginWrapper p : activePlugins) {
try {
return p.classLoader.loadClass(name);
} catch (ClassNotFoundException e) {
//not found. try next
}
}
// not found in any of the plugin classloaders.
throw new ClassNotFoundException(name);
}
@Override
protected URL findResource(String name) {
for (PluginWrapper p : activePlugins) {
URL url = p.classLoader.getResource(name);
if(url!=null)
return url;
}
return null;
}
}
private static final Logger LOGGER = Logger.getLogger(PluginManager.class.getName());
}
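
An illustrative call (the extension point is chosen arbitrarily) showing how discover() surfaces META-INF/services implementations contributed by active plugins:

import hudson.PluginManager;
import hudson.tasks.Builder;
import java.util.Collection;

class DiscoverSketch {
    static void listBuilders(PluginManager pm) {
        Collection<Class<? extends Builder>> impls = pm.discover(Builder.class);
        for (Class<? extends Builder> c : impls)
            System.out.println("found builder implementation: " + c.getName());
    }
}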


@ -0,0 +1,361 @@
package hudson;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.types.FileSet;
import org.apache.tools.ant.taskdefs.Expand;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.BufferedReader;
import java.io.FileReader;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import java.util.jar.Manifest;
import java.util.logging.Logger;
import hudson.util.IOException2;
/**
* Represents a Hudson plug-in and associated control information
* for Hudson to control {@link Plugin}.
*
* <p>
* A plug-in is packaged into a jar file whose extension is <tt>".hpi"</tt>.
* A plugin needs to have a special manifest entry to identify what it is.
*
* <p>
* At runtime, a plugin has two distinct state axes.
* <ol>
* <li>Enabled/Disabled. If enabled, Hudson is going to use it
* next time Hudson runs. Otherwise the next run will ignore it.
* <li>Activated/Deactivated. If activated, that means Hudson is using
* the plugin in this session. Otherwise it's not.
* </ol>
* <p>
* For example, an activated but disabled plugin is still running in this session,
* but it won't be loaded the next time Hudson starts.
*
* @author Kohsuke Kawaguchi
*/
public final class PluginWrapper {
/**
* Plugin manifest.
* Contains description of the plugin.
*/
private final Manifest manifest;
/**
* Loaded plugin instance.
* Null if disabled.
*/
public final Plugin plugin;
/**
* {@link ClassLoader} for loading classes from this plugin.
* Null if disabled.
*/
public final ClassLoader classLoader;
/**
* Base URL for loading static resources from this plugin.
* Null if disabled. The static resources are mapped under
* <tt>hudson/plugin/SHORTNAME/</tt>.
*/
public final URL baseResourceURL;
/**
* Used to control enable/disable setting of the plugin.
* If this file exists, plugin will be disabled.
*/
private final File disableFile;
/**
* Short name of the plugin. The "abc" portion of "abc.hpl".
*/
private final String shortName;
/**
* @param archive
* A .hpi archive file (a jar file), or a .hpl linked plugin.
*
* @throws IOException
* if an installation of this plugin failed. The caller should
* proceed to work with other plugins.
*/
public PluginWrapper(PluginManager owner, File archive) throws IOException {
LOGGER.info("Loading plugin: "+archive);
this.shortName = getShortName(archive);
boolean isLinked = archive.getName().endsWith(".hpl");
File expandDir = null; // if .hpi, this is the directory where war is expanded
if(isLinked) {
// resolve the .hpl file to the location of the manifest file
archive = resolve(archive,new BufferedReader(new FileReader(archive)).readLine());
// then parse manifest
FileInputStream in = new FileInputStream(archive);
try {
manifest = new Manifest(in);
} catch(IOException e) {
throw new IOException2("Failed to load "+archive,e);
} finally {
in.close();
}
} else {
expandDir = new File(archive.getParentFile(), shortName);
explode(archive,expandDir);
File manifestFile = new File(expandDir,"META-INF/MANIFEST.MF");
if(!manifestFile.exists()) {
throw new IOException("Plugin installation failed. No manifest at "+manifestFile);
}
FileInputStream fin = new FileInputStream(manifestFile);
try {
manifest = new Manifest(fin);
} finally {
fin.close();
}
}
// TODO: define a mechanism to hide classes
// String export = manifest.getMainAttributes().getValue("Export");
List<URL> paths = new ArrayList<URL>();
if(isLinked) {
String classPath = manifest.getMainAttributes().getValue("Class-Path");
for (String s : classPath.split(" +")) {
File file = resolve(archive, s);
if(file.getName().contains("*")) {
// handle wildcard
FileSet fs = new FileSet();
File dir = file.getParentFile();
fs.setDir(dir);
fs.setIncludes(file.getName());
for( String included : fs.getDirectoryScanner(new Project()).getIncludedFiles() ) {
paths.add(new File(dir,included).toURL());
}
} else {
if(!file.exists())
throw new IOException("No such file: "+file);
paths.add(file.toURL());
}
}
this.baseResourceURL = resolve(archive,
manifest.getMainAttributes().getValue("Resource-Path")).toURL();
} else {
File classes = new File(expandDir,"WEB-INF/classes");
if(classes.exists())
paths.add(classes.toURL());
File lib = new File(expandDir,"WEB-INF/lib");
File[] libs = lib.listFiles(JAR_FILTER);
if(libs!=null) {
for (File jar : libs)
paths.add(jar.toURL());
}
this.baseResourceURL = expandDir.toURL();
}
this.classLoader = new URLClassLoader(paths.toArray(new URL[0]), getClass().getClassLoader());
disableFile = new File(archive.getPath()+".disabled");
if(disableFile.exists()) {
LOGGER.info("Plugin is disabled");
this.plugin = null;
return;
}
String className = manifest.getMainAttributes().getValue("Plugin-Class");
if(className ==null) {
throw new IOException("Plugin installation failed. No 'Plugin-Class' entry in the manifest of "+archive);
}
try {
Class clazz = classLoader.loadClass(className);
Object plugin = clazz.newInstance();
if(!(plugin instanceof Plugin)) {
throw new IOException(className+" doesn't extend from hudson.Plugin");
}
this.plugin = (Plugin)plugin;
this.plugin.wrapper = this;
} catch (ClassNotFoundException e) {
IOException ioe = new IOException("Unable to load " + className + " from " + archive);
ioe.initCause(e);
throw ioe;
} catch (IllegalAccessException e) {
IOException ioe = new IOException("Unable to create instance of " + className + " from " + archive);
ioe.initCause(e);
throw ioe;
} catch (InstantiationException e) {
IOException ioe = new IOException("Unable to create instance of " + className + " from " + archive);
ioe.initCause(e);
throw ioe;
}
// initialize plugin
try {
plugin.setServletContext(owner.context);
plugin.start();
} catch(Throwable t) {
// gracefully handle any error in plugin.
IOException ioe = new IOException("Failed to initialize");
ioe.initCause(t);
throw ioe;
}
}
private static File resolve(File base, String relative) {
File rel = new File(relative);
if(rel.isAbsolute())
return rel;
else
return new File(base.getParentFile(),relative);
}
/**
* Returns the URL of the index page jelly script.
*/
public URL getIndexPage() {
return classLoader.getResource("index.jelly");
}
/**
* Returns the short name suitable for URL.
*/
public String getShortName() {
return shortName;
}
/**
* Returns a one-line descriptive name of this plugin.
*/
public String getLongName() {
String name = manifest.getMainAttributes().getValue("Long-Name");
if(name!=null) return name;
return shortName;
}
/**
* Gets the "abc" portion from "abc.ext".
*/
private static String getShortName(File archive) {
String n = archive.getName();
int idx = n.lastIndexOf('.');
if(idx>=0)
n = n.substring(0,idx);
return n;
}
/**
* Terminates the plugin.
*/
void stop() {
LOGGER.info("Stopping "+shortName);
try {
plugin.stop();
} catch(Throwable t) {
System.err.println("Failed to shut down "+shortName);
System.err.println(t);
}
}
/**
* Enables this plugin next time Hudson runs.
*/
public void enable() throws IOException {
if(!disableFile.delete())
throw new IOException("Failed to delete "+disableFile);
}
/**
* Disables this plugin next time Hudson runs.
*/
public void disable() throws IOException {
// creates an empty file
OutputStream os = new FileOutputStream(disableFile);
os.close();
}
/**
* Returns true if this plugin is activated for this session.
*/
public boolean isActive() {
return plugin!=null;
}
/**
* If true, the plugin is going to be activated next time
* Hudson runs.
*/
public boolean isEnabled() {
return !disableFile.exists();
}
/**
* Explodes the plugin into a directory, if necessary.
*/
private void explode(File archive, File destDir) throws IOException {
if(!destDir.exists())
destDir.mkdirs();
// timestamp check
File explodeTime = new File(destDir,".timestamp");
if(explodeTime.exists() && explodeTime.lastModified()>archive.lastModified())
return; // no need to expand
LOGGER.info("Extracting "+archive);
try {
Expand e = new Expand();
e.setProject(new Project());
e.setTaskType("unzip");
e.setSrc(archive);
e.setDest(destDir);
e.execute();
} catch (BuildException x) {
IOException ioe = new IOException("Failed to expand " + archive);
ioe.initCause(x);
throw ioe;
}
Util.touch(explodeTime);
}
//
//
// Action methods
//
//
public void doMakeEnabled(StaplerRequest req, StaplerResponse rsp) throws IOException {
enable();
rsp.setStatus(200);
}
public void doMakeDisabled(StaplerRequest req, StaplerResponse rsp) throws IOException {
disable();
rsp.setStatus(200);
}
private static final Logger LOGGER = Logger.getLogger(PluginWrapper.class.getName());
/**
* Filter for jar files.
*/
private static final FilenameFilter JAR_FILTER = new FilenameFilter() {
public boolean accept(File dir,String name) {
return name.endsWith(".jar");
}
};
}


@ -0,0 +1,118 @@
package hudson;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Map;
/**
* External process wrapper.
*
* <p>
* Used for launching, monitoring, and waiting for a process.
*
* @author Kohsuke Kawaguchi
*/
public final class Proc {
private final Process proc;
private final Thread t1,t2;
public Proc(String cmd,Map env, OutputStream out, File workDir) throws IOException {
this(cmd,Util.mapToEnv(env),out,workDir);
}
public Proc(String[] cmd,Map env,InputStream in, OutputStream out) throws IOException {
this(cmd,Util.mapToEnv(env),in,out);
}
public Proc(String cmd,String[] env,OutputStream out, File workDir) throws IOException {
this( Util.tokenize(cmd), env, out, workDir );
}
public Proc(String[] cmd,String[] env,OutputStream out, File workDir) throws IOException {
this( calcName(cmd), Runtime.getRuntime().exec(cmd,env,workDir), null, out );
}
public Proc(String[] cmd,String[] env,InputStream in,OutputStream out) throws IOException {
this( calcName(cmd), Runtime.getRuntime().exec(cmd,env), in, out );
}
private Proc( String name, Process proc, InputStream in, OutputStream out ) throws IOException {
this.proc = proc;
t1 = new Copier(name+": stdout copier", proc.getInputStream(), out);
t1.start();
t2 = new Copier(name+": stderr copier", proc.getErrorStream(), out);
t2.start();
if(in!=null)
new ByteCopier(name+": stdin copier",in,proc.getOutputStream()).start();
else
proc.getOutputStream().close();
}
public int join() {
try {
t1.join();
t2.join();
return proc.waitFor();
} catch (InterruptedException e) {
// aborting. kill the process
proc.destroy();
return -1;
}
}
private static class Copier extends Thread {
private final InputStream in;
private final OutputStream out;
public Copier(String threadName, InputStream in, OutputStream out) {
super(threadName);
this.in = in;
this.out = out;
}
public void run() {
try {
Util.copyStream(in,out);
in.close();
} catch (IOException e) {
// TODO: what to do?
}
}
}
private static class ByteCopier extends Thread {
private final InputStream in;
private final OutputStream out;
public ByteCopier(String threadName, InputStream in, OutputStream out) {
super(threadName);
this.in = in;
this.out = out;
}
public void run() {
try {
while(true) {
int ch = in.read();
if(ch==-1) break;
out.write(ch);
}
in.close();
out.close();
} catch (IOException e) {
// TODO: what to do?
}
}
}
private static String calcName(String[] cmd) {
StringBuffer buf = new StringBuffer();
for (String token : cmd) {
if(buf.length()>0) buf.append(' ');
buf.append(token);
}
return buf.toString();
}
}
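
A brief sketch of direct use (the command is arbitrary); passing a null environment lets Runtime.exec inherit the current one, the same way Main passes null elsewhere in this commit.

import hudson.Proc;
import java.io.File;
import java.io.IOException;

class ProcSketch {
    public static void main(String[] args) throws IOException {
        Proc p = new Proc(new String[]{"echo", "hello"},   // command and arguments
                          (String[]) null,                 // null = inherit the current environment
                          System.out,                      // where stdout/stderr are copied
                          new File("."));                  // working directory
        System.out.println("exit code: " + p.join());
    }
}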


@ -0,0 +1,326 @@
package hudson;
import hudson.model.BuildListener;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Map;
import java.util.ResourceBundle;
import java.util.StringTokenizer;
import java.util.SimpleTimeZone;
import java.util.logging.Logger;
import java.util.logging.Level;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.text.SimpleDateFormat;
import org.apache.tools.ant.taskdefs.Chmod;
import org.apache.tools.ant.taskdefs.Copy;
import org.apache.tools.ant.BuildException;
/**
* @author Kohsuke Kawaguchi
*/
public class Util {
/**
* Loads the contents of a file into a string.
*/
public static String loadFile(File logfile) throws IOException {
if(!logfile.exists())
return "";
StringBuffer str = new StringBuffer((int)logfile.length());
BufferedReader r = new BufferedReader(new FileReader(logfile));
char[] buf = new char[1024];
int len;
while((len=r.read(buf,0,buf.length))>0)
str.append(buf,0,len);
r.close();
return str.toString();
}
/**
* Deletes the contents of the given directory (but not the directory itself)
* recursively.
*
* @throws IOException
* if the operation fails.
*/
public static void deleteContentsRecursive(File file) throws IOException {
File[] files = file.listFiles();
if(files==null)
return; // the directory didn't exist in the first place
for (File child : files) {
if (child.isDirectory())
deleteContentsRecursive(child);
deleteFile(child);
}
}
private static void deleteFile(File f) throws IOException {
if (!f.delete()) {
if(!f.exists())
// we are trying to delete a file that no longer exists, so this is not an error
return;
// perhaps this file is read-only?
// try chmod. this becomes no-op if this is not Unix.
try {
Chmod chmod = new Chmod();
chmod.setProject(new org.apache.tools.ant.Project());
chmod.setFile(f);
chmod.setPerm("u+w");
chmod.execute();
} catch (BuildException e) {
LOGGER.log(Level.INFO,"Failed to chmod "+f,e);
}
throw new IOException("Unable to delete " + f.getPath());
}
}
public static void deleteRecursive(File dir) throws IOException {
deleteContentsRecursive(dir);
deleteFile(dir);
}
/**
* Creates a new temporary directory.
*/
public static File createTempDir() throws IOException {
File tmp = File.createTempFile("hudson", "tmp");
if(!tmp.delete())
throw new IOException("Failed to delete "+tmp);
if(!tmp.mkdirs())
throw new IOException("Failed to create a new directory "+tmp);
return tmp;
}
private static final Pattern errorCodeParser = Pattern.compile(".*error=([0-9]+).*");
/**
* On Windows, error messages for IOException aren't very helpful.
* This method writes an additional, user-friendly error message to the listener.
*/
public static void displayIOException( IOException e, BuildListener listener ) {
if(File.separatorChar!='\\')
return; // not Windows
Matcher m = errorCodeParser.matcher(e.getMessage());
if(!m.matches())
return; // failed to parse
try {
ResourceBundle rb = ResourceBundle.getBundle("/hudson/win32errors");
listener.getLogger().println(rb.getString("error"+m.group(1)));
} catch (Exception _) {
// silently recover from resource related failures
}
}
/**
* Guesses the current host name.
*/
public static String getHostName() {
try {
return InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
return "localhost";
}
}
public static void copyStream(InputStream in,OutputStream out) throws IOException {
byte[] buf = new byte[8192];
int len;
while((len=in.read(buf))>0)
out.write(buf,0,len);
}
public static String[] tokenize(String s) {
StringTokenizer st = new StringTokenizer(s);
String[] a = new String[st.countTokens()];
for (int i = 0; st.hasMoreTokens(); i++)
a[i] = st.nextToken();
return a;
}
public static String[] mapToEnv(Map<?,?> m) {
String[] r = new String[m.size()];
int idx=0;
for (final Map.Entry e : m.entrySet()) {
r[idx++] = e.getKey().toString() + '=' + e.getValue().toString();
}
return r;
}
public static int min(int x, int... values) {
for (int i : values) {
if(i<x)
x=i;
}
return x;
}
public static String nullify(String v) {
if(v!=null && v.length()==0) v=null;
return v;
}
public static String toHexString(byte[] data, int start, int len) {
StringBuffer buf = new StringBuffer();
for( int i=0; i<len; i++ ) {
int b = data[start+i]&0xFF;
if(b<16) buf.append('0');
buf.append(Integer.toHexString(b));
}
return buf.toString();
}
public static String toHexString(byte[] bytes) {
return toHexString(bytes,0,bytes.length);
}
public static String getTimeSpanString(long duration) {
duration /= 1000;
if(duration<60)
return combine(duration,"second");
duration /= 60;
if(duration<60)
return combine(duration,"minute");
duration /= 60;
if(duration<24)
return combine(duration,"hour");
duration /= 24;
if(duration<30)
return combine(duration,"day");
duration /= 30;
if(duration<12)
return combine(duration,"month");
duration /= 12;
return combine(duration,"year");
}
/**
* Combines number and unit, with a plural suffix if needed.
*/
public static String combine(long n, String suffix) {
String s = Long.toString(n)+' '+suffix;
if(n!=1)
s += 's';
return s;
}
/**
* Escapes non-ASCII characters.
*/
public static String encode(String s) {
try {
boolean escaped = false;
StringBuffer out = new StringBuffer(s.length());
ByteArrayOutputStream buf = new ByteArrayOutputStream();
OutputStreamWriter w = new OutputStreamWriter(buf,"UTF-8");
for (int i = 0; i < s.length(); i++) {
int c = (int) s.charAt(i);
if (c<128 && c!=' ') {
out.append((char) c);
} else {
// 1 char -> UTF8
w.write(c);
w.flush();
for (byte b : buf.toByteArray()) {
out.append('%');
out.append(toDigit((b >> 4) & 0xF));
out.append(toDigit(b & 0xF));
}
buf.reset();
escaped = true;
}
}
return escaped ? out.toString() : s;
} catch (IOException e) {
throw new Error(e); // impossible
}
}
private static char toDigit(int n) {
char ch = Character.forDigit(n,16);
if(ch>='a') ch = (char)(ch-'a'+'A');
return ch;
}
/**
* Creates an empty file.
*/
public static void touch(File file) throws IOException {
new FileOutputStream(file).close();
}
/**
* Copies a single file by using Ant.
*/
public static void copyFile(File src, File dst) throws BuildException {
Copy cp = new Copy();
cp.setProject(new org.apache.tools.ant.Project());
cp.setTofile(dst);
cp.setFile(src);
cp.setOverwrite(true);
cp.execute();
}
/**
* Convert null to "".
*/
public static String fixNull(String s) {
if(s==null) return "";
else return s;
}
/**
* Convert empty string to null.
*/
public static String fixEmpty(String s) {
if(s==null || s.length()==0) return null;
return s;
}
/**
* Cuts all the leading path portion and get just the file name.
*/
public static String getFileName(String filePath) {
int idx = filePath.lastIndexOf('\\');
if(idx>=0)
return getFileName(filePath.substring(idx+1));
idx = filePath.lastIndexOf('/');
if(idx>=0)
return getFileName(filePath.substring(idx+1));
return filePath;
}
public static final SimpleDateFormat XS_DATETIME_FORMATTER = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
static {
XS_DATETIME_FORMATTER.setTimeZone(new SimpleTimeZone(0,"GMT"));
}
private static final Logger LOGGER = Logger.getLogger(Util.class.getName());
}
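
A few illustrative calls (values chosen arbitrarily) for the helpers above:

import hudson.Util;

class UtilSketch {
    public static void main(String[] args) {
        System.out.println(Util.getTimeSpanString(90 * 1000L)); // "1 minute"
        System.out.println(Util.combine(3, "build"));           // "3 builds"
        System.out.println(Util.encode("a b"));                 // "a%20b"
        System.out.println(Util.getFileName("a\\b/c.txt"));     // "c.txt"
    }
}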


@ -0,0 +1,162 @@
package hudson;
import com.thoughtworks.xstream.converters.reflection.PureJavaReflectionProvider;
import com.thoughtworks.xstream.core.JVM;
import hudson.model.Hudson;
import hudson.model.User;
import hudson.triggers.Trigger;
import hudson.util.IncompatibleVMDetected;
import hudson.util.RingBufferLogHandler;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;
import java.util.TimerTask;
import java.util.logging.Logger;
import java.util.logging.Level;
/**
* Entry point when Hudson is used as a webapp.
*
* @author Kohsuke Kawaguchi
*/
public class WebAppMain implements ServletContextListener {
/**
* Creates the sole instance of {@link Hudson} and register it to the {@link ServletContext}.
*/
public void contextInitialized(ServletContextEvent event) {
installLogger();
File home = getHomeDir(event);
home.mkdirs();
System.out.println("hudson home directory: "+home);
ServletContext context = event.getServletContext();
// make sure that we are using XStream in the "enhanced" (JVM-specific) mode
if(new JVM().bestReflectionProvider().getClass()==PureJavaReflectionProvider.class) {
// nope
context.setAttribute("app",new IncompatibleVMDetected());
return;
}
// Tomcat breaks XSLT with JDK 5.0 and onward. Check if that's the case, and if so,
// try to correct it
try {
TransformerFactory.newInstance();
// if this works we are all happy
} catch (TransformerFactoryConfigurationError x) {
// no it didn't.
Logger logger = Logger.getLogger(WebAppMain.class.getName());
logger.log(Level.WARNING, "XSLT not configured correctly. Hudson will try to fix this. See http://issues.apache.org/bugzilla/show_bug.cgi?id=40895 for more details",x);
System.setProperty(TransformerFactory.class.getName(),"com.sun.org.apache.xalan.internal.xsltc.trax.TransformerFactoryImpl");
try {
TransformerFactory.newInstance();
logger.info("XSLT is set to the JAXP RI in JRE");
} catch(TransformerFactoryConfigurationError y) {
logger.log(Level.SEVERE, "Failed to correct the problem.");
}
}
try {
context.setAttribute("app",new Hudson(home,context));
} catch( IOException e ) {
throw new Error(e);
}
// set the version
Properties props = new Properties();
try {
InputStream is = getClass().getResourceAsStream("hudson-version.properties");
if(is!=null)
props.load(is);
} catch (IOException e) {
e.printStackTrace(); // if the version properties is missing, that's OK.
}
Object ver = props.get("version");
if(ver==null) ver="?";
context.setAttribute("version",ver);
Trigger.init(); // start running trigger
// trigger the loading of changelogs in the background,
// but give the system 10 seconds so that the first page
// can be served quickly
Trigger.timer.schedule(new TimerTask() {
public void run() {
User.get("nobody").getBuilds();
}
}, 1000*10);
}
/**
* Installs log handler to monitor all Hudson logs.
*/
private void installLogger() {
RingBufferLogHandler handler = new RingBufferLogHandler();
Hudson.logRecords = handler.getView();
Logger.getLogger("hudson").addHandler(handler);
}
/**
* Determines the home directory for Hudson.
*
* People make configuration mistakes, so we try to be nice
* to them by doing {@link String#trim()}.
*/
private File getHomeDir(ServletContextEvent event) {
// check JNDI for the home directory first
try {
Context env = (Context) new InitialContext().lookup("java:comp/env");
String value = (String) env.lookup("HUDSON_HOME");
if(value!=null && value.trim().length()>0)
return new File(value.trim());
} catch (NamingException e) {
// ignore
}
// look at the env var next
String env = EnvVars.masterEnvVars.get("HUDSON_HOME");
if(env!=null)
return new File(env.trim());
// finally check the system property
String sysProp = System.getProperty("HUDSON_HOME");
if(sysProp!=null)
return new File(sysProp.trim());
// otherwise pick a place by ourselves
String root = event.getServletContext().getRealPath("/WEB-INF/workspace");
if(root!=null) {
File ws = new File(root.trim());
if(ws.exists())
// Hudson <1.42 used to prefer this before ~/.hudson, so
// check the existence and if it's there, use it.
// otherwise if this is a new installation, prefer ~/.hudson
return ws;
}
// if for some reason we can't put it within the webapp, use home directory.
return new File(new File(System.getProperty("user.home")),".hudson");
}
public void contextDestroyed(ServletContextEvent event) {
Hudson instance = Hudson.getInstance();
if(instance!=null)
instance.cleanUp();
}
}


@ -0,0 +1,97 @@
package hudson;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.converters.ConversionException;
import com.thoughtworks.xstream.io.StreamException;
import com.thoughtworks.xstream.io.xml.XppReader;
import hudson.util.AtomicFileWriter;
import hudson.util.IOException2;
import hudson.util.XStream2;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
/**
* Represents an XML file that Hudson uses as a data file.
*
* @author Kohsuke Kawaguchi
*/
public final class XmlFile {
private final XStream xs;
private final File file;
public XmlFile(File file) {
this(DEFAULT_XSTREAM,file);
}
public XmlFile(XStream xs, File file) {
this.xs = xs;
this.file = file;
}
/**
* Loads the contents of this file into a new object.
*/
public Object read() throws IOException {
Reader r = new BufferedReader(new InputStreamReader(new FileInputStream(file), "UTF-8"));
try {
return xs.fromXML(r);
} catch(StreamException e) {
throw new IOException2("Unable to read "+file,e);
} catch(ConversionException e) {
throw new IOException2("Unable to read "+file,e);
} finally {
r.close();
}
}
/**
* Loads the contents of this file into an existing object.
*/
public void unmarshal( Object o ) throws IOException {
Reader r = new BufferedReader(new InputStreamReader(new FileInputStream(file),"UTF-8"));
try {
xs.unmarshal(new XppReader(r),o);
} catch (StreamException e) {
throw new IOException2(e);
} catch(ConversionException e) {
throw new IOException2("Unable to read "+file,e);
} finally {
r.close();
}
}
public void write( Object o ) throws IOException {
AtomicFileWriter w = new AtomicFileWriter(file);
try {
w.write("<?xml version='1.0' encoding='UTF-8'?>\n");
xs.toXML(o,w);
w.commit();
} catch(StreamException e) {
throw new IOException2(e);
} finally {
w.close();
}
}
public boolean exists() {
return file.exists();
}
public void mkdirs() {
file.getParentFile().mkdirs();
}
public String toString() {
return file.toString();
}
/**
* {@link XStream} instance is supposed to be thread-safe.
*/
private static final XStream DEFAULT_XSTREAM = new XStream2();
}
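
A short round-trip sketch (the file name is hypothetical): persisting an object with the default XStream instance and reading it back.

import hudson.XmlFile;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;

class XmlFileSketch {
    public static void main(String[] args) throws IOException {
        XmlFile f = new XmlFile(new File("demo.xml"));
        ArrayList<String> data = new ArrayList<String>();
        data.add("hello");
        f.write(data);                  // atomic write of the XML form
        Object restored = f.read();     // parse it back into a fresh object
        System.out.println(restored);
    }
}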


@ -0,0 +1,2 @@
# overwritten by the Ant task
version=development


@ -0,0 +1,26 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* {@link ModelObject} with some convenience methods.
*
* @author Kohsuke Kawaguchi
*/
abstract class AbstractModelObject implements ModelObject {
/**
* Displays the error in a page.
*/
protected final void sendError(Exception e, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException {
sendError(e.getMessage(),req,rsp);
}
protected final void sendError(String message, StaplerRequest req, StaplerResponse rsp) throws ServletException, IOException {
req.setAttribute("message",message);
rsp.forward(this,"error",req);
}
}


@ -0,0 +1,28 @@
package hudson.model;
import java.io.Serializable;
/**
* Contributes an item to the task list.
*
* @author Kohsuke Kawaguchi
*/
public interface Action extends Serializable, ModelObject {
/**
* Gets the file name of the icon (relative to /images/24x24)
*/
String getIconFileName();
/**
* Gets the string to be displayed.
*
* The convention is to capitalize the first letter of each word,
* such as "Test Result".
*/
String getDisplayName();
/**
* Gets the URL path name.
*/
String getUrlName();
}
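
A minimal implementation sketch (hypothetical action, not part of this commit; it assumes ModelObject, which is not shown in this excerpt, requires nothing beyond getDisplayName()):

import hudson.model.Action;

class HelloAction implements Action {
    public String getIconFileName() { return "clipboard.gif"; }  // relative to /images/24x24
    public String getDisplayName()  { return "Hello World"; }
    public String getUrlName()      { return "hello"; }
}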


@ -0,0 +1,52 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import java.util.List;
import java.util.Vector;
/**
* {@link ModelObject} that can have additional {@link Action}s.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Actionable extends AbstractModelObject {
/**
* Actions contributed to this model object.
*/
private List<Action> actions;
/**
* Gets actions contributed to this build.
*
* @return
* may be empty but never null.
*/
public synchronized List<Action> getActions() {
if(actions==null)
actions = new Vector<Action>();
return actions;
}
public Action getAction(int index) {
if(actions==null) return null;
return actions.get(index);
}
public <T extends Action> T getAction(Class<T> type) {
for (Action a : getActions()) {
if(type.isInstance(a))
return (T)a; // type.cast() not available in JDK 1.4
}
return null;
}
public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
for (Action a : getActions()) {
if(a.getUrlName().equals(token))
return a;
}
return null;
}
}


@ -0,0 +1,428 @@
package hudson.model;
import hudson.Launcher;
import hudson.Proc;
import hudson.Util;
import static hudson.model.Hudson.isWindows;
import hudson.model.Fingerprint.RangeSet;
import hudson.model.Fingerprint.BuildPtr;
import hudson.scm.CVSChangeLogParser;
import hudson.scm.ChangeLogParser;
import hudson.scm.ChangeLogSet;
import hudson.scm.SCM;
import hudson.scm.ChangeLogSet.Entry;
import hudson.tasks.BuildStep;
import hudson.tasks.Builder;
import hudson.tasks.Publisher;
import hudson.tasks.Fingerprinter.FingerprintAction;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.triggers.SCMTrigger;
import org.xml.sax.SAXException;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.util.Calendar;
import java.util.Map;
import java.util.HashMap;
import java.util.Collections;
/**
* @author Kohsuke Kawaguchi
*/
public final class Build extends Run<Project,Build> implements Runnable {
/**
* Name of the slave this project was built on.
* Null if built by the master.
*/
private String builtOn;
/**
* SCM used for this build.
* May be null, for historical reasons, in which case CVS is assumed.
*/
private ChangeLogParser scm;
/**
* Changes in this build.
*/
private volatile transient ChangeLogSet<? extends Entry> changeSet;
/**
* Creates a new build.
*/
Build(Project project) throws IOException {
super(project);
}
public Project getProject() {
return getParent();
}
/**
* Loads a build from a log file.
*/
Build(Project project, File buildDir) throws IOException {
super(project,buildDir);
}
@Override
public boolean isKeepLog() {
// if any of the downstream projects is configured with 'keep dependency component',
// we need to keep this log
for (Map.Entry<Project,RangeSet> e : getDownstreamBuilds().entrySet()) {
Project p = e.getKey();
if(!p.isKeepDependencies()) continue;
// is there any active build that depends on us?
for (Build build : p.getBuilds()) {
if(e.getValue().includes(build.getNumber()))
return true; // yep. an active build depends on us. can't recycle.
}
}
// TODO: report why the log is kept in UI
return super.isKeepLog();
}
/**
* Gets the changes incorporated into this build.
*
* @return never null.
*/
public ChangeLogSet<? extends Entry> getChangeSet() {
if(scm==null)
scm = new CVSChangeLogParser();
if(changeSet==null) // cached value
changeSet = calcChangeSet();
return changeSet;
}
private ChangeLogSet<? extends Entry> calcChangeSet() {
File changelogFile = new File(getRootDir(), "changelog.xml");
if(!changelogFile.exists())
return ChangeLogSet.EMPTY;
try {
return scm.parse(this,changelogFile);
} catch (IOException e) {
e.printStackTrace();
} catch (SAXException e) {
e.printStackTrace();
}
return ChangeLogSet.EMPTY;
}
public Calendar due() {
return timestamp;
}
/**
* Returns a {@link Slave} on which this build was done.
*/
public Node getBuiltOn() {
if(builtOn==null)
return Hudson.getInstance();
else
return Hudson.getInstance().getSlave(builtOn);
}
/**
* Returns the name of the slave it was built on, or null if it was the master.
*/
public String getBuiltOnStr() {
return builtOn;
}
/**
* Gets {@link AbstractTestResultAction} associated with this build if any.
*/
public AbstractTestResultAction getTestResultAction() {
return getAction(AbstractTestResultAction.class);
}
/**
* Gets the dependency relationship from this build (as the source)
* and that project (as the sink.)
*
* @return
* range of build numbers that represent which downstream builds are using this build.
* The range will be empty if no build of that project matches this.
*/
public RangeSet getDownstreamRelationship(Project that) {
RangeSet rs = new RangeSet();
FingerprintAction f = getAction(FingerprintAction.class);
if(f==null) return rs;
// look for fingerprints that point to this build as the source, and merge them all
for (Fingerprint e : f.getFingerprints().values()) {
BuildPtr o = e.getOriginal();
if(o!=null && o.is(this))
rs.add(e.getRangeSet(that));
}
return rs;
}
/**
* Gets the downstream builds of this build, which are the builds of the
* downstream projects that use artifacts of this build.
*
* @return
* For each project with fingerprinting enabled, returns the range
* of builds (which can be empty if no build uses the artifact from this build.)
*/
public Map<Project,RangeSet> getDownstreamBuilds() {
Map<Project,RangeSet> r = new HashMap<Project,RangeSet>();
for (Project p : getParent().getDownstreamProjects()) {
if(p.isFingerprintConfigured())
r.put(p,getDownstreamRelationship(p));
}
return r;
}
/**
* Gets the dependency relationship between this build (as the sink)
* and that project (as the source).
*
* @return
* Build number of the upstream build that feeds into this build,
* or -1 if no record is available.
*/
public int getUpstreamRelationship(Project that) {
FingerprintAction f = getAction(FingerprintAction.class);
if(f==null) return -1;
int n = -1;
// look for fingerprints that point to the given project as the source, and merge them all
for (Fingerprint e : f.getFingerprints().values()) {
BuildPtr o = e.getOriginal();
if(o!=null && o.is(that))
n = Math.max(n,o.getNumber());
}
return n;
}
/**
* Gets the upstream builds of this build, which are the builds of the
* upstream projects whose artifacts feed into this build.
*/
public Map<Project,Integer> getUpstreamBuilds() {
Map<Project,Integer> r = new HashMap<Project,Integer>();
for (Project p : getParent().getUpstreamProjects()) {
int n = getUpstreamRelationship(p);
if(n>=0)
r.put(p,n);
}
return r;
}
/**
* Gets the changes in the dependency between the given build and this build.
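* <p>
* For example, if the given build used build #3 of some dependency project and this
* build used build #5 of the same project, the returned map contains a
* {@link DependencyChange} from 3 to 5 for that project.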
*/
public Map<Project,DependencyChange> getDependencyChanges(Build from) {
if(from==null) return Collections.EMPTY_MAP; // make it easy to call this from views
FingerprintAction n = this.getAction(FingerprintAction.class);
FingerprintAction o = from.getAction(FingerprintAction.class);
if(n==null || o==null) return Collections.EMPTY_MAP;
Map<Project,Integer> ndep = n.getDependencies();
Map<Project,Integer> odep = o.getDependencies();
Map<Project,DependencyChange> r = new HashMap<Project,DependencyChange>();
for (Map.Entry<Project,Integer> entry : odep.entrySet()) {
Project p = entry.getKey();
Integer oldNumber = entry.getValue();
Integer newNumber = ndep.get(p);
if(newNumber!=null && oldNumber.compareTo(newNumber)<0) {
r.put(p,new DependencyChange(p,oldNumber,newNumber));
}
}
return r;
}
/**
* Represents a change in the dependency.
*/
public static final class DependencyChange {
/**
* The dependency project.
*/
public final Project project;
/**
* Version of the dependency project used in the previous build.
*/
public final int fromId;
/**
* {@link Build} object for {@link #fromId}. Can be null if the log is gone.
*/
public final Build from;
/**
* Version of the dependency project used in this build.
*/
public final int toId;
public final Build to;
public DependencyChange(Project project, int fromId, int toId) {
this.project = project;
this.fromId = fromId;
this.toId = toId;
this.from = project.getBuildByNumber(fromId);
this.to = project.getBuildByNumber(toId);
}
}
/**
* Performs a build.
*/
public void run() {
run(new Runner() {
/**
* Since configuration can be changed while a build is in progress,
* stick to one launcher and use it.
*/
private Launcher launcher;
public Result run(BuildListener listener) throws IOException {
Node node = Executor.currentExecutor().getOwner().getNode();
assert builtOn==null;
builtOn = node.getNodeName();
launcher = node.createLauncher(listener);
if(node instanceof Slave)
listener.getLogger().println("Building remotely on "+node.getNodeName());
if(!project.checkout(Build.this,launcher,listener,new File(getRootDir(),"changelog.xml")))
return Result.FAILURE;
SCM scm = project.getScm();
Build.this.scm = scm.createChangeLogParser();
Build.this.changeSet = Build.this.calcChangeSet();
if(!preBuild(listener,project.getBuilders()))
return Result.FAILURE;
if(!preBuild(listener,project.getPublishers()))
return Result.FAILURE;
if(!build(listener,project.getBuilders()))
return Result.FAILURE;
if(!isWindows()) {
try {
// ignore a failure.
new Proc(new String[]{"rm","../lastSuccessful"},new String[0],listener.getLogger(),getProject().getBuildDir()).join();
int r = new Proc(new String[]{
"ln","-s","builds/"+getId()/*ugly*/,"../lastSuccessful"},
new String[0],listener.getLogger(),getProject().getBuildDir()).join();
if(r!=0)
listener.getLogger().println("ln failed: "+r);
} catch (IOException e) {
PrintStream log = listener.getLogger();
log.println("ln failed");
Util.displayIOException(e,listener);
e.printStackTrace( log );
}
}
return Result.SUCCESS;
}
public void post(BuildListener listener) {
// run all of them even if one of them failed
for( Publisher bs : project.getPublishers().values() )
bs.perform(Build.this, launcher, listener);
}
private boolean build(BuildListener listener, Map<?, Builder> steps) {
for( Builder bs : steps.values() )
if(!bs.perform(Build.this, launcher, listener))
return false;
return true;
}
private boolean preBuild(BuildListener listener,Map<?,? extends BuildStep> steps) {
for( BuildStep bs : steps.values() )
if(!bs.prebuild(Build.this,listener))
return false;
return true;
}
});
}
@Override
protected void onStartBuilding() {
SCMTrigger t = (SCMTrigger)project.getTriggers().get(SCMTrigger.DESCRIPTOR);
if(t==null) {
super.onStartBuilding();
} else {
synchronized(t) {
try {
t.abort();
} catch (InterruptedException e) {
// handle the interrupt later
Thread.currentThread().interrupt();
}
super.onStartBuilding();
}
}
}
@Override
protected void onEndBuilding() {
SCMTrigger t = (SCMTrigger)project.getTriggers().get(SCMTrigger.DESCRIPTOR);
if(t==null) {
super.onEndBuilding();
} else {
synchronized(t) {
super.onEndBuilding();
t.startPolling();
}
}
}
@Override
public Map<String,String> getEnvVars() {
Map<String,String> env = super.getEnvVars();
JDK jdk = project.getJDK();
if(jdk !=null)
jdk.buildEnvVars(env);
project.getScm().buildEnvVars(env);
return env;
}
//
//
// actions
//
//
/**
* Stops this build if it's still going.
*
* If the <tt>executor/stop</tt> URL were used directly, it would cause a 404 once the
* build has already been killed, because {@link #getExecutor()} returns null.
*/
public synchronized void doStop( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
Executor e = getExecutor();
if(e!=null)
e.doStop(req,rsp);
else
// nothing is building
rsp.forwardToPreviousPage(req);
}
}

View File

@ -0,0 +1,19 @@
package hudson.model;
/**
* Receives events that happen during a build.
*
* @author Kohsuke Kawaguchi
*/
public interface BuildListener extends TaskListener {
/**
* Called when a build is started.
*/
void started();
/**
* Called when a build is finished.
*/
void finished(Result result);
}

View File

@ -0,0 +1,201 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import hudson.util.RunList;
/**
* Represents a set of {@link Executor}s on the same computer.
*
* <p>
* {@link Executor}s on one {@link Computer} are transparently interchangeable
* (that is the definition of {@link Computer}.)
*
* <p>
* This object is related to {@link Node} but they have some significant differences.
* {@link Computer} primarily works as a holder of {@link Executor}s, so
* if a {@link Node} is configured (probably temporarily) with 0 executors,
* you won't have a {@link Computer} object for it.
*
* Also, even if you remove a {@link Node}, it takes time for the corresponding
* {@link Computer} to be removed, if some builds are already in progress on that
* node.
*
* <p>
* This object also serves UI (since {@link Node} is an interface and can't have
* related side pages.)
*
* @author Kohsuke Kawaguchi
*/
public class Computer implements ModelObject {
private final List<Executor> executors = new ArrayList<Executor>();
private int numExecutors;
/**
* True if Hudson shouldn't start new builds on this node.
*/
private boolean temporarilyOffline;
/**
* {@link Node} object may be created and deleted independently
* from this object.
*/
private String nodeName;
public Computer(Node node) {
assert node.getNumExecutors()!=0 : "Computer created with 0 executors";
setNode(node);
}
/**
* Number of {@link Executor}s that are configured for this computer.
*
* <p>
* When this value is decreased, it is temporarily possible
* for {@link #executors} to have a larger number than this.
*/
// ugly name to let EL access this
public int getNumExecutors() {
return numExecutors;
}
/**
* Returns the {@link Node} that this computer represents.
*/
public Node getNode() {
if(nodeName==null)
return Hudson.getInstance();
return Hudson.getInstance().getSlave(nodeName);
}
public boolean isTemporarilyOffline() {
return temporarilyOffline;
}
public void setTemporarilyOffline(boolean temporarilyOffline) {
this.temporarilyOffline = temporarilyOffline;
Hudson.getInstance().getQueue().scheduleMaintenance();
}
public String getIcon() {
if(temporarilyOffline)
return "computer-x.gif";
else
return "computer.gif";
}
public String getDisplayName() {
return getNode().getNodeName();
}
public String getUrl() {
return "computer/"+getDisplayName()+"/";
}
/**
* Returns projects that are tied to this node.
*/
public List<Project> getTiedJobs() {
List<Project> r = new ArrayList<Project>();
for( Project p : Hudson.getInstance().getProjects() ) {
if(p.getAssignedNode()==getNode())
r.add(p);
}
return r;
}
/*package*/ void setNode(Node node) {
assert node!=null;
if(node instanceof Slave)
this.nodeName = node.getNodeName();
else
this.nodeName = null;
setNumExecutors(node.getNumExecutors());
}
/*package*/ void kill() {
setNumExecutors(0);
}
private synchronized void setNumExecutors(int n) {
this.numExecutors = n;
// send signal to all idle executors to potentially kill them off
for( Executor e : executors )
if(e.getCurrentBuild()==null)
e.interrupt();
// if the number is increased, add new ones
while(executors.size()<numExecutors)
executors.add(new Executor(this));
}
/**
* Returns the number of idle {@link Executor}s that can start working immediately.
*/
public synchronized int countIdle() {
int n = 0;
for (Executor e : executors) {
if(e.isIdle())
n++;
}
return n;
}
/**
* Gets the read-only view of all {@link Executor}s.
*/
public synchronized List<Executor> getExecutors() {
return new ArrayList<Executor>(executors);
}
/**
* Called by {@link Executor} to kill excessive executors from this computer.
*/
/*package*/ synchronized void removeExecutor(Executor e) {
executors.remove(e);
if(executors.isEmpty())
Hudson.getInstance().removeComputer(this);
}
/**
* Interrupt all {@link Executor}s.
*/
public synchronized void interrupt() {
for (Executor e : executors) {
e.interrupt();
}
}
//
//
// UI
//
//
public void doRssAll( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " all builds", new RunList(getTiedJobs()));
}
public void doRssFailed( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " failed builds", new RunList(getTiedJobs()).failureOnly());
}
private void rss(StaplerRequest req, StaplerResponse rsp, String suffix, RunList runs) throws IOException, ServletException {
RSS.forwardToRss(getDisplayName()+ suffix, getUrl(),
runs.newBuilds(), Run.FEED_ADAPTER, req, rsp );
}
public void doToggleOffline( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
setTemporarilyOffline(!temporarilyOffline);
rsp.forwardToPreviousPage(req);
}
}

View File

@ -0,0 +1,13 @@
package hudson.model;
/**
* Classes that are described by {@link Descriptor}.
*
* @author Kohsuke Kawaguchi
*/
public interface Describable<T extends Describable<T>> {
/**
* Gets the descriptor for this instance.
*/
Descriptor<T> getDescriptor();
}

View File

@ -0,0 +1,206 @@
package hudson.model;
import hudson.XmlFile;
import hudson.scm.CVSSCM;
import javax.servlet.http.HttpServletRequest;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import org.kohsuke.stapler.StaplerRequest;
/**
* Metadata about a configurable instance.
*
* <p>
* {@link Descriptor} is an object that has metadata about a {@link Describable}
* object, and also serves as a factory. A {@link Descriptor}/{@link Describable}
* combination is used throughout Hudson to implement a
* configuration/extensibility mechanism.
*
* <p>
* Take the CVS support, implemented in the {@link CVSSCM} class, as an example.
* Whenever a job is configured with CVS, a new
* {@link CVSSCM} instance is created with the per-job configuration
* information. This instance gets serialized to XML, and this instance
* will be called to perform CVS operations for that job. This is the job
* of {@link Describable} &mdash; each instance represents a specific
* configuration of the CVS support (branch, CVSROOT, etc.)
*
* <p>
* For Hudson to create such configured {@link CVSSCM} instance, Hudson
* needs another object that captures the metadata of {@link CVSSCM},
* and that is what a {@link Descriptor} is for. {@link CVSSCM} class
* has a singleton descriptor, and this descriptor helps render
* the configuration form, remember system-wide configuration (such as
* where <tt>cvs.exe</tt> is), and works as a factory.
*
* <p>
* {@link Descriptor} also usually has its associated views.
*
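* <p>
* As a rough, hypothetical sketch of the pairing (the class, field and form names below
* are invented for illustration and are not part of Hudson):
*
* <pre><xmp>
* public class MyTool implements Describable<MyTool> {
*     private final String option;   // per-instance configuration captured by this object
*
*     public MyTool(String option) { this.option = option; }
*
*     public Descriptor<MyTool> getDescriptor() { return DESCRIPTOR; }
*
*     // singleton that holds the metadata and acts as the factory
*     public static final Descriptor<MyTool> DESCRIPTOR = new Descriptor<MyTool>(MyTool.class) {
*         public String getDisplayName() { return "My tool"; }
*         public MyTool newInstance(StaplerRequest req) {
*             return new MyTool(req.getParameter("option"));
*         }
*     };
* }
* </xmp></pre>
*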
* @author Kohsuke Kawaguchi
* @see Describable
*/
public abstract class Descriptor<T extends Describable<T>> {
private Map<String,Object> properties;
/**
* The class being described by this descriptor.
*/
public final Class<? extends T> clazz;
protected Descriptor(Class<? extends T> clazz) {
this.clazz = clazz;
}
/**
* Human readable name of this kind of configurable object.
*/
public abstract String getDisplayName();
/**
* Creates a configured instance from the submitted form.
*
* <p>
* Hudson only invokes this method when the user wants an instance of <tt>T</tt>.
* So there's no need to check that in the implementation.
*
* @param req
* Always non-null. This object includes all the submitted form values.
*
* @throws FormException
* Signals a problem in the submitted form.
*/
public abstract T newInstance(StaplerRequest req) throws FormException;
/**
* Returns the resource path to the help screen HTML, if any.
*/
public String getHelpFile() {
return "";
}
/**
* Checks if the given object is created from this {@link Descriptor}.
*/
public final boolean isInstance( T instance ) {
return clazz.isInstance(instance);
}
/**
* Returns the data store that can be used to store configuration info.
*
* <p>
* The data store is local to each {@link Descriptor}.
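*
* <p>
* A hypothetical sketch of how a concrete descriptor might use this together with
* {@link #save()} from its {@link #configure(HttpServletRequest)} override (the
* property key and form field below are invented for illustration):
*
* <pre><xmp>
* public boolean configure(HttpServletRequest req) {
*     getProperties().put("cvs.exe", req.getParameter("cvs_exe"));
*     save();
*     return true;
* }
* </xmp></pre>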
*
* @return
* never null.
*/
protected synchronized Map<String,Object> getProperties() {
if(properties==null)
properties = load();
return properties;
}
/**
* Invoked when the global configuration page is submitted.
*
* Can be overridden to store descriptor-specific information.
*
* @return false
* to keep the client on the same config page.
*/
public boolean configure( HttpServletRequest req ) throws FormException {
return true;
}
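/**
* Computes the resource path of this descriptor's <tt>config.jelly</tt> view from the
* class name; e.g. <tt>hudson.scm.CVSSCM</tt> maps to <tt>/hudson/scm/CVSSCM/config.jelly</tt>.
*/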
public final String getConfigPage() {
return '/'+clazz.getName().replace('.','/').replace('$','/')+"/config.jelly";
}
public final String getGlobalConfigPage() {
return '/'+clazz.getName().replace('.','/').replace('$','/')+"/global.jelly";
}
/**
* Saves the configuration info to the disk.
*/
protected synchronized void save() {
if(properties!=null)
try {
getConfigFile().write(properties);
} catch (IOException e) {
e.printStackTrace();
}
}
private Map<String,Object> load() {
// load
XmlFile file = getConfigFile();
if(!file.exists())
return new HashMap<String,Object>();
try {
return (Map<String,Object>)file.read();
} catch (IOException e) {
return new HashMap<String,Object>();
}
}
private XmlFile getConfigFile() {
return new XmlFile(new File(Hudson.getInstance().getRootDir(),clazz.getName()+".xml"));
}
// to work around warning when creating a generic array type
public static <T> T[] toArray( T... values ) {
return values;
}
public static <T> List<T> toList( T... values ) {
final ArrayList<T> r = new ArrayList<T>();
for (T v : values)
r.add(v);
return r;
}
public static <T extends Describable<T>>
Map<Descriptor<T>,T> toMap(List<T> describables) {
Map<Descriptor<T>,T> m = new LinkedHashMap<Descriptor<T>,T>();
for (T d : describables) {
m.put(d.getDescriptor(),d);
}
return m;
}
public static final class FormException extends Exception {
private final String formField;
public FormException(String message, String formField) {
super(message);
this.formField = formField;
}
public FormException(String message, Throwable cause, String formField) {
super(message, cause);
this.formField = formField;
}
public FormException(Throwable cause, String formField) {
super(cause);
this.formField = formField;
}
/**
* Which form field contained an error?
*/
public String getFormField() {
return formField;
}
}
}

View File

@ -0,0 +1,234 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileInputStream;
import java.io.FilenameFilter;
import java.io.IOException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.StringTokenizer;
/**
* Provides convenience methods to serve files from the file system.
*
* @author Kohsuke Kawaguchi
*/
public abstract class DirectoryHolder extends Actionable {
/**
* Serves a file from the file system (maps the URL to a directory in the file system.)
*
* @param icon
* The icon file name, like "folder-open.gif"
* @param serveDirIndex
* True to generate the directory index.
* False to serve "index.html"
*/
protected final void serveFile(StaplerRequest req, StaplerResponse rsp, File root, String icon, boolean serveDirIndex) throws IOException, ServletException {
if(req.getQueryString()!=null) {
req.setCharacterEncoding("UTF-8");
String path = req.getParameter("path");
if(path!=null) {
rsp.sendRedirect(URLEncoder.encode(path,"UTF-8"));
return;
}
}
String path = req.getRestOfPath();
if(path.length()==0)
path = "/";
if(path.indexOf("..")!=-1 || path.length()<1) {
// don't serve anything other than files in the artifacts dir
rsp.sendError(HttpServletResponse.SC_BAD_REQUEST);
return;
}
File f = new File(root,path.substring(1));
boolean isFingerprint=false;
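// a trailing "*fingerprint*" path component means "show the fingerprint record
// of the preceding file" rather than serving the file itself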
if(f.getName().equals("*fingerprint*")) {
f = f.getParentFile();
isFingerprint = true;
}
if(!f.exists()) {
rsp.sendError(HttpServletResponse.SC_NOT_FOUND);
return;
}
if(f.isDirectory()) {
if(!req.getRequestURL().toString().endsWith("/")) {
rsp.sendRedirect2(req.getRequestURL().append('/').toString());
return;
}
if(serveDirIndex) {
req.setAttribute("it",this);
List<Path> parentPaths = buildParentPath(path);
req.setAttribute("parentPath",parentPaths);
req.setAttribute("topPath",
parentPaths.isEmpty() ? "." : repeat("../",parentPaths.size()));
req.setAttribute("files",buildChildPathList(f));
req.setAttribute("icon",icon);
req.setAttribute("path",path);
req.getView(this,"dir.jelly").forward(req,rsp);
return;
} else {
f = new File(f,"index.html");
}
}
if(isFingerprint) {
FileInputStream in = new FileInputStream(f);
try {
Hudson hudson = Hudson.getInstance();
rsp.forward(hudson.getFingerprint(hudson.getDigestOf(in)),"/",req);
} finally {
in.close();
}
} else {
rsp.serveFile(req,f.toURL());
}
}
/**
* Builds a list of {@link Path} that represents ancestors
* from a string like "/foo/bar/zot".
*/
private List<Path> buildParentPath(String pathList) {
List<Path> r = new ArrayList<Path>();
StringTokenizer tokens = new StringTokenizer(pathList, "/");
int total = tokens.countTokens();
int current=1;
while(tokens.hasMoreTokens()) {
String token = tokens.nextToken();
r.add(new Path(repeat("../",total-current),token,true,0));
current++;
}
return r;
}
/**
* Builds a list of lists of {@link Path}. Each inner
* list of {@link Path} represents one child item to be shown
* (this mechanism is used to skip empty intermediate directories.)
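* For example, a directory <tt>org</tt> whose only child is <tt>apache</tt>, whose only
* child in turn is <tt>tools</tt>, is rendered as the single entry <tt>org/apache/tools</tt>.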
*/
private List<List<Path>> buildChildPathList(File cur) {
List<List<Path>> r = new ArrayList<List<Path>>();
File[] files = cur.listFiles();
Arrays.sort(files,FILE_SORTER);
for( File f : files ) {
Path p = new Path(f.getName(),f.getName(),f.isDirectory(),f.length());
if(!f.isDirectory()) {
r.add(Collections.singletonList(p));
} else {
// find all empty intermediate directories
List<Path> l = new ArrayList<Path>();
l.add(p);
String relPath = f.getName();
while(true) {
// only files that don't start with '.' and aren't SCM metadata (CVS, .svn) count as meaningful files
File[] sub = f.listFiles(new FilenameFilter() {
public boolean accept(File dir, String name) {
return !name.startsWith(".") && !name.equals("CVS") && !name.equals(".svn");
}
});
if(sub.length!=1 || !sub[0].isDirectory())
break;
f = sub[0];
relPath += '/'+f.getName();
l.add(new Path(relPath,f.getName(),true,0));
}
r.add(l);
}
}
return r;
}
private static String repeat(String s,int times) {
StringBuffer buf = new StringBuffer(s.length()*times);
for(int i=0; i<times; i++ )
buf.append(s);
return buf.toString();
}
/**
* Represents information about one file or folder.
*/
public final class Path {
/**
* Relative URL to this path from the current page.
*/
private final String href;
/**
* Name of this path. Just the file name portion.
*/
private final String title;
private final boolean isFolder;
/**
* File size, if this is a file; not meaningful for a folder.
*/
private final long size;
public Path(String href, String title, boolean isFolder, long size) {
this.href = href;
this.title = title;
this.isFolder = isFolder;
this.size = size;
}
public boolean isFolder() {
return isFolder;
}
public String getHref() {
return href;
}
public String getTitle() {
return title;
}
public String getIconName() {
return isFolder?"folder.gif":"text.gif";
}
public long getSize() {
return size;
}
}
private static final Comparator<File> FILE_SORTER = new Comparator<File>() {
public int compare(File lhs, File rhs) {
// directories first, files next
int r = dirRank(lhs)-dirRank(rhs);
if(r!=0) return r;
// otherwise alphabetical
return lhs.getName().compareTo(rhs.getName());
}
private int dirRank(File f) {
if(f.isDirectory()) return 0;
else return 1;
}
};
}

View File

@ -0,0 +1,121 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
/**
* Thread that executes builds.
*
* @author Kohsuke Kawaguchi
*/
public class Executor extends Thread {
private final Computer owner;
private final Queue queue;
private Build build;
private long startTime;
public Executor(Computer owner) {
super("Executor #"+owner.getExecutors().size()+" for "+owner.getDisplayName());
this.owner = owner;
this.queue = Hudson.getInstance().getQueue();
start();
}
public void run() {
while(true) {
if(Hudson.getInstance().isTerminating())
return;
synchronized(owner) {
if(owner.getNumExecutors()<owner.getExecutors().size()) {
// we've got too many executors.
owner.removeExecutor(this);
return;
}
}
try {
Project p = queue.pop();
build = p.newBuild();
} catch (InterruptedException e) {
continue;
} catch (IOException e) {
e.printStackTrace();
continue;
}
startTime = System.currentTimeMillis();
try {
build.run();
} catch (Throwable e) {
// for some reason the executor died. this is really
// a bug in the code, but we don't want the executor to die,
// so just leave some info and go on to build other things
e.printStackTrace();
}
build = null;
}
}
/**
* Returns the current {@link Build} this executor is running.
*
* @return
* null if the executor is idle.
*/
public Build getCurrentBuild() {
return build;
}
/**
* Returns true if this {@link Executor} is ready for action.
*/
public boolean isIdle() {
return build==null;
}
/**
* Returns the progress of the current build as a number between 0 and 100.
*
* @return -1
* if it's impossible to estimate the progress.
*/
public int getProgress() {
Build b = build.getProject().getLastSuccessfulBuild();
if(b==null) return -1;
long duration = b.getDuration();
if(duration==0) return -1;
int num = (int)((System.currentTimeMillis()-startTime)*100/duration);
if(num>=100) num=99;
return num;
}
/**
* Stops the current build.
*/
public void doStop( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
interrupt();
rsp.forwardToPreviousPage(req);
}
public Computer getOwner() {
return owner;
}
/**
* Returns the executor of the current thread.
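* Must be called from an {@link Executor} thread; otherwise the cast below fails
* with a {@link ClassCastException}.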
*/
public static Executor currentExecutor() {
return (Executor)Thread.currentThread();
}
}

View File

@ -0,0 +1,78 @@
package hudson.model;
import hudson.model.RunMap.Constructor;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.util.logging.Logger;
/**
* Job that runs outside Hudson whose result is submitted to Hudson
* (either via web interface, or simply by placing files on the file system,
* for compatibility.)
*
* @author Kohsuke Kawaguchi
*/
public class ExternalJob extends ViewJob<ExternalJob,ExternalRun> {
public ExternalJob(Hudson parent,String name) {
super(parent,name);
}
@Override
protected void reload() {
this.runs.load(this,new Constructor<ExternalRun>() {
public ExternalRun create(File dir) throws IOException {
return new ExternalRun(ExternalJob.this,dir);
}
});
}
/**
* Creates a new build of this project for immediate execution.
*
* Needs to be synchronized so that two {@link #newBuild()} invocations serialize each other.
*/
public synchronized ExternalRun newBuild() throws IOException {
ExternalRun run = new ExternalRun(this);
runs.put(run);
return run;
}
/**
* Used to check that this is an external job that is ready to accept a build result.
*/
public void doAcceptBuildResult( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rsp.setStatus(HttpServletResponse.SC_OK);
}
/**
* Used to post the build result from a remote machine.
*/
public void doPostBuildResult( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
ExternalRun run = newBuild();
run.acceptRemoteSubmission(req.getReader());
rsp.setStatus(HttpServletResponse.SC_OK);
}
private static final Logger logger = Logger.getLogger(ExternalJob.class.getName());
public JobDescriptor<ExternalJob,ExternalRun> getDescriptor() {
return DESCRIPTOR;
}
public static final JobDescriptor<ExternalJob,ExternalRun> DESCRIPTOR = new JobDescriptor<ExternalJob,ExternalRun>(ExternalJob.class) {
public String getDisplayName() {
return "Monitoring an external job";
}
public ExternalJob newInstance(String name) {
return new ExternalJob(Hudson.getInstance(),name);
}
};
}

View File

@ -0,0 +1,100 @@
package hudson.model;
import hudson.Proc;
import hudson.util.DecodingStream;
import hudson.util.DualOutputStream;
import org.xmlpull.mxp1.MXParser;
import org.xmlpull.v1.XmlPullParser;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.io.Reader;
/**
* {@link Run} for {@link ExternalJob}.
*
* @author Kohsuke Kawaguchi
*/
public class ExternalRun extends Run<ExternalJob,ExternalRun> {
/**
* Loads a run from a log file.
*/
ExternalRun(ExternalJob owner, File runDir) throws IOException {
super(owner,runDir);
}
/**
* Creates a new run.
*/
ExternalRun(ExternalJob project) throws IOException {
super(project);
}
/**
* Instead of performing a build, run the specified command,
* record the log and its exit code, then call it a build.
*/
public void run(final String[] cmd) {
run(new Runner() {
public Result run(BuildListener listener) throws Exception {
Proc proc = new Proc(cmd,getEnvVars(),System.in,new DualOutputStream(System.out,listener.getLogger()));
return proc.join()==0?Result.SUCCESS:Result.FAILURE;
}
public void post(BuildListener listener) {
// do nothing
}
});
}
/**
* Instead of performing a build, accept the log and the return code
* from a remote machine in an XML format of:
*
* <pre><xmp>
* <run>
* <log>...console output...</log>
* <result>exit code</result>
* </run>
* </xmp></pre>
*/
public void acceptRemoteSubmission(final Reader in) {
final long[] duration = new long[1];
run(new Runner() {
public Result run(BuildListener listener) throws Exception {
PrintStream logger = new PrintStream(new DecodingStream(listener.getLogger()));
XmlPullParser xpp = new MXParser();
xpp.setInput(in);
xpp.nextTag(); // get to the <run>
xpp.nextTag(); // get to the <log>
while(xpp.nextToken()!=XmlPullParser.END_TAG) {
int type = xpp.getEventType();
if(type==XmlPullParser.TEXT
|| type==XmlPullParser.CDSECT)
logger.print(xpp.getText());
}
xpp.nextTag(); // get to <result>
Result r = Integer.parseInt(xpp.nextText())==0?Result.SUCCESS:Result.FAILURE;
xpp.nextTag(); // get to <duration> (optional)
if(xpp.getEventType()==XmlPullParser.START_TAG
&& xpp.getName().equals("duration")) {
duration[0] = Long.parseLong(xpp.nextText());
}
return r;
}
public void post(BuildListener listener) {
// do nothing
}
});
if(duration[0]!=0)
super.duration = duration[0];
}
}

View File

@ -0,0 +1,534 @@
package hudson.model;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.MarshallingContext;
import com.thoughtworks.xstream.converters.UnmarshallingContext;
import com.thoughtworks.xstream.converters.collections.CollectionConverter;
import com.thoughtworks.xstream.io.HierarchicalStreamReader;
import com.thoughtworks.xstream.io.HierarchicalStreamWriter;
import hudson.Util;
import hudson.XmlFile;
import hudson.util.HexBinaryConverter;
import hudson.util.XStream2;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Date;
import java.util.Hashtable;
import java.util.List;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A file being tracked by Hudson.
*
* @author Kohsuke Kawaguchi
*/
public class Fingerprint implements ModelObject {
/**
* Pointer to a {@link Build}.
*/
public static class BuildPtr {
final String name;
final int number;
public BuildPtr(String name, int number) {
this.name = name;
this.number = number;
}
public BuildPtr(Run run) {
this( run.getParent().getName(), run.getNumber() );
}
/**
* Gets the name of the job.
* <p>
* Such a job could have been removed since then,
* so there might not be a corresponding
* {@link Job}.
*/
public String getName() {
return name;
}
/**
* Gets the {@link Job} that this pointer points to,
* or null if such a job no longer exists.
*/
public Job getJob() {
return Hudson.getInstance().getJob(name);
}
/**
* Gets the project build number.
* <p>
* Such a {@link Run} could have been
* discarded since then.
*/
public int getNumber() {
return number;
}
/**
* Gets the {@link Run} that this pointer points to,
* or null if such a run no longer exists.
*/
public Run getRun() {
Job j = getJob();
if(j==null) return null;
return j.getBuildByNumber(number);
}
private boolean isAlive() {
return getRun()!=null;
}
/**
* Returns true if {@link BuildPtr} points to the given run.
*/
public boolean is(Run r) {
return r.getNumber()==number && r.getParent().getName().equals(name);
}
/**
* Returns true if {@link BuildPtr} points to the given job.
*/
public boolean is(Job job) {
return job.getName().equals(name);
}
}
/**
* Range of build numbers [start,end). Immutable.
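* For example, [3,5) covers build numbers 3 and 4 but not 5.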
*/
public static final class Range {
final int start;
final int end;
public Range(int start, int end) {
assert start<end;
this.start = start;
this.end = end;
}
public int getStart() {
return start;
}
public int getEnd() {
return end;
}
public boolean isSmallerThan(int i) {
return end<=i;
}
public boolean isBiggerThan(int i) {
return i<start;
}
public boolean includes(int i) {
return start<=i && i<end;
}
public Range expandRight() {
return new Range(start,end+1);
}
public Range expandLeft() {
return new Range(start-1,end);
}
public boolean isAdjacentTo(Range that) {
return this.end==that.start;
}
public String toString() {
return "["+start+","+end+")";
}
/**
* Returns true if two {@link Range}s can't be combined into a single range.
*/
public boolean isIndependent(Range that) {
return this.end<that.start ||that.end<this.start;
}
/**
* Returns the {@link Range} that combines two ranges.
*/
public Range combine(Range that) {
assert !isIndependent(that);
return new Range(
Math.min(this.start,that.start),
Math.max(this.end ,that.end ));
}
}
/**
* Set of {@link Range}s.
*/
public static final class RangeSet {
// sorted
private final List<Range> ranges;
public RangeSet() {
this(new ArrayList<Range>());
}
private RangeSet(List<Range> data) {
this.ranges = data;
}
/**
* Gets all the ranges.
*/
public synchronized List<Range> getRanges() {
return new ArrayList<Range>(ranges);
}
/**
* Expands the range set to include the given value.
* If the set already includes this number, this will be a no-op.
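* For example, adding 4 to {[1,3),[5,7)} yields {[1,3),[4,7)}; adding 3 after that
* collapses the two adjacent ranges into {[1,7)}.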
*/
public synchronized void add(int n) {
for( int i=0; i<ranges.size(); i++ ) {
Range r = ranges.get(i);
if(r.includes(n)) return; // already included
if(r.end==n) {
ranges.set(i,r.expandRight());
checkCollapse(i);
return;
}
if(r.start==n+1) {
ranges.set(i,r.expandLeft());
checkCollapse(i-1);
return;
}
if(r.isBiggerThan(n)) {
// needs to insert a single-value Range
ranges.add(i,new Range(n,n+1));
return;
}
}
ranges.add(new Range(n,n+1));
}
private void checkCollapse(int i) {
if(i<0 || i==ranges.size()-1) return;
Range lhs = ranges.get(i);
Range rhs = ranges.get(i+1);
if(lhs.isAdjacentTo(rhs)) {
// collapsed
Range r = new Range(lhs.start,rhs.end);
ranges.set(i,r);
ranges.remove(i+1);
}
}
public synchronized boolean includes(int i) {
for (Range r : ranges) {
if(r.includes(i))
return true;
}
return false;
}
public synchronized void add(RangeSet that) {
int lhs=0,rhs=0;
while(lhs<this.ranges.size() && rhs<that.ranges.size()) {
Range lr = this.ranges.get(lhs);
Range rr = that.ranges.get(rhs);
// no overlap
if(lr.end<rr.start) {
lhs++;
continue;
}
if(rr.end<lr.start) {
ranges.add(lhs,rr);
lhs++;
rhs++;
continue;
}
// overlap. merge two
Range m = lr.combine(rr);
rhs++;
// since ranges[lhs] is expanded, it might overlap with others in this.ranges
while(lhs+1<this.ranges.size() && !m.isIndependent(this.ranges.get(lhs+1))) {
m = m.combine(this.ranges.get(lhs+1));
this.ranges.remove(lhs+1);
}
this.ranges.set(lhs,m);
}
// if anything is left in that.ranges, add them all
this.ranges.addAll(that.ranges.subList(rhs,that.ranges.size()));
}
public synchronized String toString() {
StringBuffer buf = new StringBuffer();
for (Range r : ranges) {
if(buf.length()>0) buf.append(',');
buf.append(r);
}
return buf.toString();
}
public synchronized boolean isEmpty() {
return ranges.isEmpty();
}
/**
* Returns true if all the integers logically in this {@link RangeSet}
* are smaller than the given integer. For example, {[1,3)} is smaller than 3,
* but {[1,3),[100,105)} is not smaller than anything less than 105.
*
* Note that {} is smaller than any n.
*/
public synchronized boolean isSmallerThan(int n) {
if(ranges.isEmpty()) return true;
return ranges.get(ranges.size() - 1).isSmallerThan(n);
}
static final class ConverterImpl implements Converter {
private final Converter collectionConv; // used to convert ArrayList in it
public ConverterImpl(Converter collectionConv) {
this.collectionConv = collectionConv;
}
public boolean canConvert(Class type) {
return type==RangeSet.class;
}
public void marshal(Object source, HierarchicalStreamWriter writer, MarshallingContext context) {
collectionConv.marshal( ((RangeSet)source).getRanges(), writer, context );
}
public Object unmarshal(HierarchicalStreamReader reader, final UnmarshallingContext context) {
return new RangeSet((List<Range>)(collectionConv.unmarshal(reader,context)));
}
}
}
private final Date timestamp;
/**
* Null if this fingerprint is for a file that's
* apparently produced outside Hudson.
*/
private final BuildPtr original;
private final byte[] md5sum;
private final String fileName;
/**
* Range of builds that use this file keyed by a job name.
*/
private final Hashtable<String,RangeSet> usages = new Hashtable<String,RangeSet>();
public Fingerprint(Run build, String fileName, byte[] md5sum) throws IOException {
this.original = build==null ? null : new BuildPtr(build);
this.md5sum = md5sum;
this.fileName = fileName;
this.timestamp = new Date();
save();
}
/**
* The first build in which this file showed up,
* if the file looked like it's created there.
* <p>
* This is considered as the "source" of this file,
* or the owner, in the sense that this project "owns"
* this file.
*
* @return null
* if the file is apparently created outside Hudson.
*/
public BuildPtr getOriginal() {
return original;
}
public String getDisplayName() {
return fileName;
}
/**
* The file name (like "foo.jar" without path).
*/
public String getFileName() {
return fileName;
}
/**
* Gets the MD5 hash string.
*/
public String getHashString() {
return Util.toHexString(md5sum);
}
/**
* Gets the timestamp when this record is created.
*/
public Date getTimestamp() {
return timestamp;
}
/**
* Gets a string that says how long ago this record was created.
*
* @return
* string like "3 minutes" "1 day" etc.
*/
public String getTimestampString() {
long duration = System.currentTimeMillis()-timestamp.getTime();
return Util.getTimeSpanString(duration);
}
/**
* Gets the build range set for the given job name.
*
* <p>
* These are the builds of the given job that have used this file.
*/
public RangeSet getRangeSet(String jobName) {
RangeSet r = usages.get(jobName);
if(r==null) r = new RangeSet();
return r;
}
public RangeSet getRangeSet(Job job) {
return getRangeSet(job.getName());
}
/**
* Gets the sorted list of job names where this file is used.
*/
public List<String> getJobs() {
List<String> r = new ArrayList<String>();
r.addAll(usages.keySet());
Collections.sort(r);
return r;
}
public Hashtable<String,RangeSet> getUsages() {
return usages;
}
public synchronized void add(Build b) throws IOException {
add(b.getParent().getName(),b.getNumber());
}
/**
* Records that a build of a job has used this file.
*/
public synchronized void add(String jobName, int n) throws IOException {
synchronized(usages) {
RangeSet r = usages.get(jobName);
if(r==null) {
r = new RangeSet();
usages.put(jobName,r);
}
r.add(n);
}
save();
}
/**
* Returns true if any of the builds recorded in this fingerprint
* is still retained.
*
* <p>
* This is used to find out old fingerprint records that can be removed
* without losing too much information.
*/
public synchronized boolean isAlive() {
if(original!=null && original.isAlive())   // original may be null for files created outside Hudson
return true;
for (Entry<String,RangeSet> e : usages.entrySet()) {
Job j = Hudson.getInstance().getJob(e.getKey());
if(j==null)
continue;
int oldest = j.getFirstBuild().getNumber();
if(!e.getValue().isSmallerThan(oldest))
return true;
}
return false;
}
/**
* Save the settings to a file.
*/
public synchronized void save() throws IOException {
XmlFile f = getConfigFile(getFingerprintFile(md5sum));
f.mkdirs();
f.write(this);
}
/**
* The file we save our configuration.
*/
private static XmlFile getConfigFile(File file) {
return new XmlFile(XSTREAM,file);
}
/**
* Determines the file name from md5sum.
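* The first two bytes of the hash become two directory levels, so an MD5 starting
* with <tt>ab12</tt> is stored as <tt>fingerprints/ab/12/&lt;remaining 28 hex digits&gt;.xml</tt>.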
*/
private static File getFingerprintFile(byte[] md5sum) {
assert md5sum.length==16;
return new File( Hudson.getInstance().getRootDir(),
"fingerprints/"+ Util.toHexString(md5sum,0,1)+'/'+Util.toHexString(md5sum,1,1)+'/'+Util.toHexString(md5sum,2,md5sum.length-2)+".xml");
}
/**
* Loads a {@link Fingerprint} from a file on disk.
*/
/*package*/ static Fingerprint load(byte[] md5sum) throws IOException {
return load(getFingerprintFile(md5sum));
}
/*package*/ static Fingerprint load(File file) throws IOException {
XmlFile configFile = getConfigFile(file);
if(!configFile.exists())
return null;
try {
return (Fingerprint)configFile.read();
} catch (IOException e) {
logger.log(Level.WARNING, "Failed to load "+configFile,e);
throw e;
}
}
private static final XStream XSTREAM = new XStream2();
static {
XSTREAM.alias("fingerprint",Fingerprint.class);
XSTREAM.alias("range",Range.class);
XSTREAM.alias("ranges",RangeSet.class);
XSTREAM.registerConverter(new HexBinaryConverter(),10);
XSTREAM.registerConverter(new RangeSet.ConverterImpl(
new CollectionConverter(XSTREAM.getClassMapper()) {
protected Object createCollection(Class type) {
return new ArrayList();
}
}
),10);
}
private static final Logger logger = Logger.getLogger(Fingerprint.class.getName());
}

View File

@ -0,0 +1,94 @@
package hudson.model;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.logging.Level;
import java.util.regex.Pattern;
/**
* Scans the fingerprint database and removes old records
* that are no longer relevant.
*
* <p>
* A {@link Fingerprint} is removed when none of the builds that
* it points to are available in the records.
*
* @author Kohsuke Kawaguchi
*/
public final class FingerprintCleanupThread extends PeriodicWork {
private static FingerprintCleanupThread theInstance;
public FingerprintCleanupThread() {
super("Fingerprint cleanup");
theInstance = this;
}
public static void invoke() {
theInstance.run();
}
protected void execute() {
int numFiles = 0;
File root = new File(Hudson.getInstance().getRootDir(),"fingerprints");
File[] files1 = root.listFiles(LENGTH2DIR_FILTER);
if(files1!=null) {
for (File file1 : files1) {
File[] files2 = file1.listFiles(LENGTH2DIR_FILTER);
for(File file2 : files2) {
File[] files3 = file2.listFiles(FINGERPRINTFILE_FILTER);
for(File file3 : files3) {
if(check(file3))
numFiles++;
}
deleteIfEmpty(file2);
}
deleteIfEmpty(file1);
}
}
logger.log(Level.INFO, "Cleaned up "+numFiles+" records");
}
/**
* Deletes a directory if it's empty.
*/
private void deleteIfEmpty(File dir) {
String[] r = dir.list();
if(r==null) return; // can happen in a rare occasion
if(r.length==0)
dir.delete();
}
/**
* Examines the file and returns true if a file was deleted.
*/
private boolean check(File fingerprintFile) {
try {
Fingerprint fp = Fingerprint.load(fingerprintFile);
if(!fp.isAlive()) {
fingerprintFile.delete();
return true;
}
} catch (IOException e) {
logger.log(Level.WARNING, "Failed to process "+fingerprintFile, e);
}
return false;
}
private static final FileFilter LENGTH2DIR_FILTER = new FileFilter() {
public boolean accept(File f) {
return f.isDirectory() && f.getName().length()==2;
}
};
private static final FileFilter FINGERPRINTFILE_FILTER = new FileFilter() {
private final Pattern PATTERN = Pattern.compile("[0-9a-f]{28}\\.xml");
public boolean accept(File f) {
return f.isFile() && PATTERN.matcher(f.getName()).matches();
}
};
}

View File

@ -0,0 +1,79 @@
package hudson.model;
import hudson.Util;
import java.io.File;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
/**
* Cache of {@link Fingerprint}s.
*
* <p>
* This implementation makes sure that no two {@link Fingerprint} objects
* exist for the same hash, and that unused {@link Fingerprint}s
* are eventually GC-ed to prevent memory leaks.
*
* @author Kohsuke Kawaguchi
*/
public final class FingerprintMap {
private final Map<String,WeakReference<Fingerprint>> core = new HashMap<String, WeakReference<Fingerprint>>();
/**
* Returns true if there's some data in the fingerprint database.
*/
public boolean isReady() {
return new File( Hudson.getInstance().getRootDir(),"fingerprints").exists();
}
/**
* @param build
* set to non-null if the {@link Fingerprint} to be created
* will have this build as its owner; otherwise null, to indicate
* an owner-less file.
*/
public synchronized Fingerprint getOrCreate(Build build, String fileName, byte[] md5sum) throws IOException {
return getOrCreate(build,fileName, Util.toHexString(md5sum));
}
public synchronized Fingerprint getOrCreate(Build build, String fileName, String md5sum) throws IOException {
assert build!=null;
assert fileName!=null;
Fingerprint fp = get(md5sum);
if(fp!=null)
return fp; // found it.
// not found. create a new one
fp = new Fingerprint(build,fileName,toByteArray(md5sum));
core.put(md5sum,new WeakReference<Fingerprint>(fp));
return fp;
}
public synchronized Fingerprint get(String md5sum) throws IOException {
if(md5sum.length()!=32)
return null; // illegal input
md5sum = md5sum.toLowerCase();
WeakReference<Fingerprint> wfp = core.get(md5sum);
if(wfp!=null) {
Fingerprint fp = wfp.get();
if(fp!=null)
return fp; // found it
}
return Fingerprint.load(toByteArray(md5sum));
}
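/**
* Converts a 32-character hex string into the corresponding 16-byte array.
*/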
private byte[] toByteArray(String md5sum) {
byte[] data = new byte[16];
for( int i=0; i<md5sum.length(); i+=2 )
data[i/2] = (byte)Integer.parseInt(md5sum.substring(i,i+2),16);
return data;
}
}

File diff suppressed because it is too large

View File

@ -0,0 +1,79 @@
package hudson.model;
import hudson.EnvVars;
import java.io.File;
import java.util.Map;
/**
* Information about JDK installation.
*
* @author Kohsuke Kawaguchi
*/
public final class JDK {
private final String name;
private final String javaHome;
public JDK(String name, String javaHome) {
this.name = name;
this.javaHome = javaHome;
}
/**
* install directory.
*/
public String getJavaHome() {
return javaHome;
}
/**
* Human readable display name.
*/
public String getName() {
return name;
}
/**
* Gets the path to the bin directory.
*/
public File getBinDir() {
return new File(getJavaHome(),"bin");
}
/**
* Gets the path to 'java'.
*/
private File getExecutable() {
String execName;
if(File.separatorChar=='\\')
execName = "java.exe";
else
execName = "java";
return new File(getJavaHome(),"bin/"+execName);
}
/**
* Returns true if the executable exists.
*/
public boolean getExists() {
return getExecutable().exists();
}
/**
* Sets PATH and JAVA_HOME from this JDK.
*/
public void buildEnvVars(Map<String,String> env) {
String path = env.get("PATH");
if(path==null)
path = EnvVars.masterEnvVars.get("PATH");
if(path==null)
path = getBinDir().getPath();
else
path = getBinDir().getPath()+File.pathSeparator+path;
env.put("PATH",path);
env.put("JAVA_HOME",javaHome);
if(!env.containsKey("HUDSON_HOME"))
env.put("HUDSON_HOME", Hudson.getInstance().getRootDir().getPath() );
}
}

View File

@ -0,0 +1,661 @@
package hudson.model;
import com.thoughtworks.xstream.XStream;
import hudson.ExtensionPoint;
import hudson.Util;
import hudson.XmlFile;
import hudson.tasks.BuildTrigger;
import hudson.tasks.LogRotator;
import hudson.util.ChartUtil;
import hudson.util.DataSetBuilder;
import hudson.util.IOException2;
import hudson.util.RunList;
import hudson.util.ShiftedCategoryAxis;
import hudson.util.TextFile;
import hudson.util.XStream2;
import org.apache.tools.ant.taskdefs.Copy;
import org.apache.tools.ant.types.FileSet;
import org.jfree.chart.ChartFactory;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.CategoryAxis;
import org.jfree.chart.axis.CategoryLabelPositions;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.plot.CategoryPlot;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.renderer.AreaRendererEndType;
import org.jfree.chart.renderer.category.AreaRenderer;
import org.jfree.data.category.CategoryDataset;
import org.jfree.ui.RectangleInsets;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.awt.Color;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.SortedMap;
import java.util.Collections;
/**
* A job is a runnable entity under the monitoring of Hudson.
*
* <p>
* Every time it "runs", it will be recorded as a {@link Run} object.
*
* <p>
* To register a custom {@link Job} class from a plugin, add it to
* {@link Jobs#JOBS}. Also see {@link Job#XSTREAM}.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Job<JobT extends Job<JobT,RunT>, RunT extends Run<JobT,RunT>>
extends DirectoryHolder implements Describable<Job<JobT,RunT>>, ExtensionPoint {
/**
* Project name.
*/
protected /*final*/ transient String name;
/**
* Project description. Can be HTML.
*/
protected String description;
/**
* Root directory for this job.
*/
protected transient File root;
/**
* Next build number.
* Kept in a separate file because this is the only information
* that gets updated often. This allows the rest of the configuration
* to be in the VCS.
* <p>
* In 1.28 and earlier, this field was stored in the project configuration file,
* so even though this is marked as transient, don't move it around.
*/
protected transient int nextBuildNumber = 1;
private transient Hudson parent;
private LogRotator logRotator;
private boolean keepDependencies;
protected Job(Hudson parent,String name) {
this.parent = parent;
doSetName(name);
getBuildDir().mkdirs();
}
/**
* Called when a {@link Job} is loaded from disk.
*/
protected void onLoad(Hudson root, String name) throws IOException {
this.parent = root;
doSetName(name);
TextFile f = getNextBuildNumberFile();
if(f.exists()) {
// starting 1.28, we store nextBuildNumber in a separate file.
// but old Hudson didn't do it, so if the file doesn't exist,
// assume that nextBuildNumber was read from config.xml
try {
this.nextBuildNumber = Integer.parseInt(f.readTrim());
} catch (NumberFormatException e) {
throw new IOException2(f+" doesn't contain a number",e);
}
} else {
// this must be the old Hudson. create this file now.
saveNextBuildNumber();
save(); // and delete it from the config.xml
}
}
/**
* Just update {@link #name} and {@link #root}, since they are linked.
*/
private void doSetName(String name) {
this.name = name;
this.root = new File(new File(parent.root,"jobs"),name);
}
public File getRootDir() {
return root;
}
private TextFile getNextBuildNumberFile() {
return new TextFile(new File(this.root,"nextBuildNumber"));
}
private void saveNextBuildNumber() throws IOException {
getNextBuildNumberFile().write(String.valueOf(nextBuildNumber)+'\n');
}
public final Hudson getParent() {
return parent;
}
public boolean isInQueue() {
return false;
}
/**
* If true, it will keep all the build logs of dependency components.
*/
public boolean isKeepDependencies() {
return keepDependencies;
}
/**
* Allocates a new build number.
*/
public synchronized int assignBuildNumber() throws IOException {
int r = nextBuildNumber++;
saveNextBuildNumber();
return r;
}
public int getNextBuildNumber() {
return nextBuildNumber;
}
/**
* Gets the project description HTML.
*/
public String getDescription() {
return description;
}
/**
* Sets the project description HTML.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Returns the log rotator for this job, or null if none.
*/
public LogRotator getLogRotator() {
return logRotator;
}
public void setLogRotator(LogRotator logRotator) {
this.logRotator = logRotator;
}
public String getName() {
return name;
}
public String getDisplayName() {
return getName();
}
/**
* Renames a job.
*/
public void renameTo(String newName) throws IOException {
// always synchronize from bigger objects first
synchronized(parent) {
synchronized(this) {
// sanity check
if(newName==null)
throw new IllegalArgumentException("New name is not given");
if(parent.getJob(newName)!=null)
throw new IllegalArgumentException("Job "+newName+" already exists");
// noop?
if(this.name.equals(newName))
return;
String oldName = this.name;
File oldRoot = this.root;
doSetName(newName);
File newRoot = this.root;
{// rename data files
boolean interrupted=false;
boolean renamed = false;
// try to rename the job directory.
// this may fail on Windows due to some other processes accessing a file.
// so retry few times before we fall back to copy.
for( int retry=0; retry<5; retry++ ) {
if(oldRoot.renameTo(newRoot)) {
renamed = true;
break; // succeeded
}
try {
Thread.sleep(500);
} catch (InterruptedException e) {
// process the interruption later
interrupted = true;
}
}
if(interrupted)
Thread.currentThread().interrupt();
if(!renamed) {
// failed to rename. it must be that some lengthy process is going on
// to prevent a rename operation. So do a copy. Ideally we'd like to
// later delete the old copy, but we can't reliably do so, as before the VM
// shuts down there might be a new job created under the old name.
Copy cp = new Copy();
cp.setProject(new org.apache.tools.ant.Project());
cp.setTodir(newRoot);
FileSet src = new FileSet();
src.setDir(getRootDir());
cp.addFileset(src);
cp.setOverwrite(true);
cp.setPreserveLastModified(true);
cp.setFailOnError(false); // keep going even if there's an error
cp.execute();
// try to delete as much as possible
try {
Util.deleteRecursive(oldRoot);
} catch (IOException e) {
// but ignore the error, since we expect that
e.printStackTrace();
}
}
}
parent.onRenamed(this,oldName,newName);
// update BuildTrigger of other projects that point to this object.
// can't we generalize this?
for( Project p : parent.getProjects() ) {
BuildTrigger t = (BuildTrigger) p.getPublishers().get(BuildTrigger.DESCRIPTOR);
if(t!=null) {
if(t.onJobRenamed(oldName,newName))
p.save();
}
}
}
}
}
/**
* Returns true if we should display the "build now" icon.
*/
public abstract boolean isBuildable();
/**
* Gets all the builds.
*
* @return
* never null. The first entry is the latest build.
*/
public List<RunT> getBuilds() {
return new ArrayList<RunT>(_getRuns().values());
}
/**
* Gets all the builds in a map.
*/
public SortedMap<Integer,RunT> getBuildsAsMap() {
return Collections.unmodifiableSortedMap(_getRuns());
}
/**
* @deprecated
* This is only used to support backward compatibility with
* old URLs.
*/
public RunT getBuild(String id) {
for (RunT r : _getRuns().values()) {
if(r.getId().equals(id))
return r;
}
return null;
}
/**
* @param n
* The build number.
* @see Run#getNumber()
*/
public RunT getBuildByNumber(int n) {
return _getRuns().get(n);
}
public Object getDynamic(String token, StaplerRequest req, StaplerResponse rsp) {
try {
// try to interpret the token as build number
return _getRuns().get(Integer.valueOf(token));
} catch (NumberFormatException e) {
return super.getDynamic(token,req,rsp);
}
}
/**
* The file we save our configuration.
*/
protected static XmlFile getConfigFile(File dir) {
return new XmlFile(XSTREAM,new File(dir,"config.xml"));
}
File getConfigFile() {
return new File(root,"config.xml");
}
/**
* Directory for storing {@link Run} records.
* <p>
* Some {@link Job}s may not have backing data store for {@link Run}s,
* but those {@link Job}s that use file system for storing data
* should use this directory for consistency.
*
* @see RunMap
*/
protected File getBuildDir() {
return new File(root,"builds");
}
/**
* Returns the URL of this project.
*/
public String getUrl() {
return "job/"+name+'/';
}
/**
* Gets all the runs.
*
* The resulting map must be immutable (by employing copy-on-write semantics.)
*/
protected abstract SortedMap<Integer,? extends RunT> _getRuns();
/**
* Called from {@link Run} to remove it from this job.
*
* The files are deleted already. So all the callee needs to do
* is to remove a reference from this {@link Job}.
*/
protected abstract void removeRun(RunT run);
/**
* Returns the last build.
*/
public RunT getLastBuild() {
SortedMap<Integer,? extends RunT> runs = _getRuns();
if(runs.isEmpty()) return null;
return runs.get(runs.firstKey());
}
/**
* Returns the oldest build in the record.
*/
public RunT getFirstBuild() {
SortedMap<Integer,? extends RunT> runs = _getRuns();
if(runs.isEmpty()) return null;
return runs.get(runs.lastKey());
}
/**
* Returns the last successful build, if any. Otherwise null.
*/
public RunT getLastSuccessfulBuild() {
RunT r = getLastBuild();
// temporary hack till we figure out what's causing this bug
while(r!=null && (r.isBuilding() || r.getResult()==null || r.getResult().isWorseThan(Result.UNSTABLE)))
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last stable build, if any. Otherwise null.
*/
public RunT getLastStableBuild() {
RunT r = getLastBuild();
while(r!=null && (r.isBuilding() || r.getResult().isWorseThan(Result.SUCCESS)))
r=r.getPreviousBuild();
return r;
}
/**
* Returns the last failed build, if any. Otherwise null.
*/
public RunT getLastFailedBuild() {
RunT r = getLastBuild();
while(r!=null && (r.isBuilding() || r.getResult()!=Result.FAILURE))
r=r.getPreviousBuild();
return r;
}
/**
* Used as the color of the status ball for the project.
*/
public String getIconColor() {
RunT lastBuild = getLastBuild();
while(lastBuild!=null && lastBuild.hasntStartedYet())
lastBuild = lastBuild.getPreviousBuild();
if(lastBuild!=null)
return lastBuild.getIconColor();
else
return "grey";
}
/**
* Save the settings to a file.
*/
public synchronized void save() throws IOException {
getConfigFile(root).write(this);
}
/**
* Loads a project from a config file.
*/
static Job load(Hudson root, File dir) throws IOException {
Job job = (Job)getConfigFile(dir).read();
job.onLoad(root,dir.getName());
return job;
}
//
//
// actions
//
//
/**
* Accepts submission from the configuration page.
*/
public synchronized void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
description = req.getParameter("description");
if(req.getParameter("logrotate")!=null)
logRotator = LogRotator.DESCRIPTOR.newInstance(req);
else
logRotator = null;
keepDependencies = req.getParameter("keepDependencies")!=null;
save();
String newName = req.getParameter("name");
if(newName!=null && !newName.equals(name)) {
rsp.sendRedirect("rename?newName="+newName);
} else {
rsp.sendRedirect(".");
}
}
/**
* Accepts the new description.
*/
public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
description = req.getParameter("description");
save();
rsp.sendRedirect("."); // go to the top page
}
/**
* Returns the image that shows the current build status.
*/
public void doBuildStatus( StaplerRequest req, StaplerResponse rsp ) throws IOException {
rsp.sendRedirect2(req.getContextPath()+"/nocacheImages/48x48/"+getBuildStatusUrl());
}
public String getBuildStatusUrl() {
return getIconColor()+".gif";
}
/**
* Returns the graph that shows how long each build took.
*/
public void doBuildTimeGraph( StaplerRequest req, StaplerResponse rsp ) throws IOException {
class Label implements Comparable<Label> {
private final Run run;
public Label(Run r) {
this.run = r;
}
public int compareTo(Label that) {
return this.run.number-that.run.number;
}
public boolean equals(Object o) {
Label that = (Label) o;
return run ==that.run;
}
public int hashCode() {
return run.hashCode();
}
public String toString() {
String l = run.getDisplayName();
if(run instanceof Build) {
String s = ((Build)run).getBuiltOnStr();
if(s!=null)
l += ' '+s;
}
return l;
}
}
DataSetBuilder<String,Label> data = new DataSetBuilder<String, Label>();
for( Run r : getBuilds() )
data.add( ((double)r.getDuration())/(1000*60), "mins", new Label(r));
ChartUtil.generateGraph(req,rsp,createChart(data.build()),500,200);
}
private JFreeChart createChart(CategoryDataset dataset) {
final JFreeChart chart = ChartFactory.createStackedAreaChart(
null, // chart title
null, // unused
"min", // range axis label
dataset, // data
PlotOrientation.VERTICAL, // orientation
false, // include legend
true, // tooltips
false // urls
);
chart.setBackgroundPaint(Color.white);
final CategoryPlot plot = chart.getCategoryPlot();
// plot.setAxisOffset(new Spacer(Spacer.ABSOLUTE, 5.0, 5.0, 5.0, 5.0));
plot.setBackgroundPaint(Color.WHITE);
plot.setOutlinePaint(null);
plot.setForegroundAlpha(0.8f);
// plot.setDomainGridlinesVisible(true);
// plot.setDomainGridlinePaint(Color.white);
plot.setRangeGridlinesVisible(true);
plot.setRangeGridlinePaint(Color.black);
CategoryAxis domainAxis = new ShiftedCategoryAxis(null);
plot.setDomainAxis(domainAxis);
domainAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_90);
domainAxis.setLowerMargin(0.0);
domainAxis.setUpperMargin(0.0);
domainAxis.setCategoryMargin(0.0);
final NumberAxis rangeAxis = (NumberAxis) plot.getRangeAxis();
rangeAxis.setStandardTickUnits(NumberAxis.createIntegerTickUnits());
AreaRenderer ar = (AreaRenderer) plot.getRenderer();
ar.setEndType(AreaRendererEndType.TRUNCATE);
ar.setSeriesPaint(0,new Color(0x72,0x9F,0xCF));
// crop extra space around the graph
plot.setInsets(new RectangleInsets(0,0,0,5.0));
return chart;
}
/**
* Deletes this job.
*/
public synchronized void doDoDelete( StaplerRequest req, StaplerResponse rsp ) throws IOException {
if(!Hudson.adminCheck(req,rsp))
return;
Util.deleteRecursive(root);
getParent().deleteJob(this);
rsp.sendRedirect2(req.getContextPath()+"/");
}
/**
* Renames this job.
*/
public /*not synchronized. see renameTo()*/ void doDoRename( StaplerRequest req, StaplerResponse rsp ) throws IOException {
if(!Hudson.adminCheck(req,rsp))
return;
String newName = req.getParameter("newName");
renameTo(newName);
rsp.sendRedirect2(req.getContextPath()+'/'+getUrl()); // send to the new job page
}
public void doRssAll( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " all builds", new RunList(this));
}
public void doRssFailed( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " failed builds", new RunList(this).failureOnly());
}
private void rss(StaplerRequest req, StaplerResponse rsp, String suffix, RunList runs) throws IOException, ServletException {
RSS.forwardToRss(getDisplayName()+ suffix, getUrl(),
runs.newBuilds(), Run.FEED_ADAPTER, req, rsp );
}
/**
* Used to load/save job configuration.
*
* When you extend {@link Job} in a plugin, try to register an alias so
* that it produces reasonable XML.
*/
protected static final XStream XSTREAM = new XStream2();
static {
XSTREAM.alias("project",Project.class);
}
}
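/**
 * Hedged usage sketch, not part of the original source: shows how a caller
 * might read build state off a {@link Job} using only the accessors defined
 * above. The class and variable names are illustrative.
 */
class JobUsageExample {
    static void printSummary(Job job) {
        Run lastGood = job.getLastSuccessfulBuild();
        System.out.println("number of builds: " + job.getBuilds().size());
        System.out.println("last successful: "
                + (lastGood != null ? lastGood.getDisplayName() : "none"));
        System.out.println("status icon color: " + job.getIconColor());
    }
}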

View File

@ -0,0 +1,150 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Calendar;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.Collections;
import java.util.GregorianCalendar;
import hudson.scm.ChangeLogSet.Entry;
import hudson.Util;
import hudson.util.RunList;
/**
* Collection of {@link Job}s.
*
* @author Kohsuke Kawaguchi
*/
public abstract class JobCollection extends AbstractModelObject {
/**
* Gets all the jobs in this collection.
*/
public abstract Collection<Job> getJobs();
/**
* Checks if the job is in this collection.
*/
public abstract boolean containsJob(Job job);
/**
* Gets the name of this collection.
*/
public abstract String getViewName();
/**
* Message displayed on the top page. Can be null. May include HTML.
*/
public abstract String getDescription();
/**
* Returns the path relative to the context root.
*/
public abstract String getUrl();
public static final class UserInfo implements Comparable<UserInfo> {
private final User user;
private Calendar lastChange;
private Project project;
UserInfo(User user, Project p, Calendar lastChange) {
this.user = user;
this.project = p;
this.lastChange = lastChange;
}
public User getUser() {
return user;
}
public Calendar getLastChange() {
return lastChange;
}
public Project getProject() {
return project;
}
/**
* Returns a human-readable string representation of when this user was last active.
*/
public String getLastChangeTimeString() {
long duration = new GregorianCalendar().getTimeInMillis()-lastChange.getTimeInMillis();
return Util.getTimeSpanString(duration);
}
public String getTimeSortKey() {
return Util.XS_DATETIME_FORMATTER.format(lastChange.getTime());
}
public int compareTo(UserInfo that) {
return that.lastChange.compareTo(this.lastChange);
}
}
/**
* Gets the users that show up in the changelog of this job collection.
*/
public final List<UserInfo> getPeople() {
Map<User,UserInfo> users = new HashMap<User,UserInfo>();
for (Job job : getJobs()) {
if (job instanceof Project) {
Project p = (Project) job;
for (Build build : p.getBuilds()) {
for (Entry entry : build.getChangeSet()) {
User user = entry.getAuthor();
UserInfo info = users.get(user);
if(info==null)
users.put(user,new UserInfo(user,p,build.getTimestamp()));
else
if(info.getLastChange().before(build.getTimestamp())) {
info.project = p;
info.lastChange = build.getTimestamp();
}
}
}
}
}
List<UserInfo> r = new ArrayList<UserInfo>(users.values());
Collections.sort(r);
return r;
}
/**
* Creates a job in this collection.
*
* @return
* null if the creation fails.
*/
public abstract Job doCreateJob( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException;
public void doRssAll( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " all builds", new RunList(getJobs()));
}
public void doRssFailed( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " failed builds", new RunList(getJobs()).failureOnly());
}
private void rss(StaplerRequest req, StaplerResponse rsp, String suffix, RunList runs) throws IOException, ServletException {
RSS.forwardToRss(getDisplayName()+ suffix, getUrl(),
runs.newBuilds(), Run.FEED_ADAPTER, req, rsp );
}
public static final Comparator<JobCollection> SORTER = new Comparator<JobCollection>() {
public int compare(JobCollection lhs, JobCollection rhs) {
return lhs.getViewName().compareTo(rhs.getViewName());
}
};
}
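/**
 * Hedged usage sketch, not part of the original source: shows how the people
 * list computed by {@link JobCollection#getPeople()} might be consumed. The
 * class and variable names are illustrative.
 */
class JobCollectionUsageExample {
    static void printRecentCommitters(JobCollection view) {
        for (JobCollection.UserInfo info : view.getPeople()) {
            // UserInfo sorts most-recently-active first (see compareTo above)
            System.out.println(info.getUser() + " last touched "
                    + info.getProject().getName()
                    + " " + info.getLastChangeTimeString() + " ago");
        }
    }
}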

View File

@ -0,0 +1,27 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
/**
* {@link Descriptor} for {@link Job}s.
*
* @author Kohsuke Kawaguchi
*/
public abstract class JobDescriptor<J extends Job<J,R>,R extends Run<J,R>> extends Descriptor<Job<J,R>> {
protected JobDescriptor(Class<? extends Job<J,R>> clazz) {
super(clazz);
}
/**
* @deprecated
* This is not a valid operation for {@link Job}s.
*/
public Job<J,R> newInstance(StaplerRequest req) throws FormException {
throw new UnsupportedOperationException();
}
/**
* Creates a new {@link Job}.
*/
public abstract Job<J,R> newInstance(String name);
}

View File

@ -0,0 +1,26 @@
package hudson.model;
import java.util.List;
/**
* List of all installed {@link Job} types.
*
* @author Kohsuke Kawaguchi
*/
public class Jobs {
/**
* List of all installed job types.
*/
public static final List<JobDescriptor> JOBS = (List)Descriptor.toList(
Project.DESCRIPTOR,
ExternalJob.DESCRIPTOR
);
public static JobDescriptor getDescriptor(String displayName) {
for (JobDescriptor job : JOBS) {
if(job.getDisplayName().equals(displayName))
return job;
}
return null;
}
}
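/**
 * Hedged sketch, not part of the original source: combines the descriptor
 * lookup above with {@link JobDescriptor#newInstance(String)} to create a job.
 * The display name string comes from Project.DESCRIPTOR later in this commit;
 * the job name passed in is illustrative.
 */
class JobsUsageExample {
    static Job createSoftwareProject(String name) {
        JobDescriptor d = Jobs.getDescriptor("Building a software project");
        if (d == null)
            throw new IllegalStateException("the project job type is not installed");
        return d.newInstance(name);
    }
}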

View File

@ -0,0 +1,159 @@
package hudson.model;
import hudson.util.CountingOutputStream;
import hudson.util.WriterOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.io.RandomAccessFile;
import java.io.Writer;
/**
* Represents a large text data.
*
* @author Kohsuke Kawaguchi
*/
public class LargeText {
private final File file;
private final boolean completed;
public LargeText(File file, boolean completed) {
this.file = file;
this.completed = completed;
}
/**
* Writes the tail portion of the file to the {@link Writer}.
*
* <p>
* The text file is assumed to be in the system default encoding.
*
* @param start
* The byte offset in the input file where the write operation starts.
*
* @return
* if the file is still being written, this method writes the file
* until the last newline character and returns the offset to start
* the next write operation.
*/
public long writeLogTo(long start, Writer w) throws IOException {
CountingOutputStream os = new CountingOutputStream(new WriterOutputStream(w));
RandomAccessFile f = new RandomAccessFile(file,"r");
f.seek(start);
if(completed) {
// write everything till EOF
byte[] buf = new byte[1024];
int sz;
while((sz=f.read(buf))>=0)
os.write(buf,0,sz);
} else {
ByteBuf buf = new ByteBuf(null,f);
HeadMark head = new HeadMark(buf);
TailMark tail = new TailMark(buf);
while(tail.moveToNextLine(f)) {
head.moveTo(tail,os);
}
head.finish(os);
}
f.close();
os.flush();
return os.getCount()+start;
}
/**
* Points to a byte in the buffer.
*/
private static class Mark {
protected ByteBuf buf;
protected int pos;
public Mark(ByteBuf buf) {
this.buf = buf;
}
}
/**
* Points to the start of the region that's not committed
* to the output yet.
*/
private static final class HeadMark extends Mark {
public HeadMark(ByteBuf buf) {
super(buf);
}
/**
* Moves this mark to 'that' mark, and writes the data
* to {@link OutputStream} if necessary.
*/
void moveTo(Mark that, OutputStream os) throws IOException {
while(this.buf!=that.buf) {
os.write(buf.buf,0,buf.size);
buf = buf.next;
pos = 0;
}
this.pos = that.pos;
}
void finish(OutputStream os) throws IOException {
os.write(buf.buf,0,pos);
}
}
/**
* Points to the end of the region.
*/
private static final class TailMark extends Mark {
public TailMark(ByteBuf buf) {
super(buf);
}
boolean moveToNextLine(RandomAccessFile f) throws IOException {
while(true) {
while(pos==buf.size) {
if(!buf.isFull()) {
// the buffer isn't full, so we have already read up to EOF; no complete line remains
return false;
} else {
// read into the next buffer
buf = new ByteBuf(buf,f);
pos = 0;
}
}
byte b = buf.buf[pos++];
if(b=='\r' || b=='\n')
return true;
}
}
}
private static final class ByteBuf {
private final byte[] buf = new byte[1024];
private int size = 0;
private ByteBuf next;
public ByteBuf(ByteBuf previous, RandomAccessFile f) throws IOException {
if(previous!=null) {
assert previous.next==null;
previous.next = this;
}
while(!this.isFull()) {
int chunk = f.read(buf, size, buf.length - size);
if(chunk==-1)
return;
size+= chunk;
}
}
public boolean isFull() {
return buf.length==size;
}
}
}
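/**
 * Hedged usage sketch, not part of the original source: polls a still-growing
 * log file with {@link LargeText#writeLogTo(long, Writer)}. The sleep interval
 * is illustrative, and a real caller would stop once the producer marks the
 * text as completed.
 */
class LargeTextUsageExample {
    static void tail(File log) throws IOException, InterruptedException {
        java.io.PrintWriter out = new java.io.PrintWriter(System.out, true);
        long offset = 0;
        while (true) {
            // while the file is still growing, writeLogTo() stops at the last
            // complete line and returns the offset to resume from next time
            offset = new LargeText(log, false).writeLogTo(offset, out);
            Thread.sleep(1000);
        }
    }
}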

View File

@ -0,0 +1,10 @@
package hudson.model;
/**
* A model object has a URL.
*
* @author Kohsuke Kawaguchi
*/
public interface ModelObject {
String getDisplayName();
}

View File

@ -0,0 +1,16 @@
package hudson.model;
/**
* @author Kohsuke Kawaguchi
*/
public class NoFingerprintMatch implements ModelObject {
private final String md5sum;
public NoFingerprintMatch(String md5sum) {
this.md5sum = md5sum;
}
public String getDisplayName() {
return md5sum;
}
}

View File

@ -0,0 +1,62 @@
package hudson.model;
import hudson.Launcher;
/**
* Commonality between {@link Slave} and master {@link Hudson}.
*
* @author Kohsuke Kawaguchi
*/
public interface Node {
/**
* Name of this node.
*
* @return
* "" if this is master
*/
String getNodeName();
/**
* Human-readable description of this node.
*/
String getNodeDescription();
/**
* Returns a {@link Launcher} for executing programs on this node.
*/
Launcher createLauncher(TaskListener listener);
/**
* Returns the number of {@link Executor}s.
*
* This may be different from <code>getExecutors().size()</code>
* because it takes time to adjust the number of executors.
*/
int getNumExecutors();
/**
* Returns the {@link Mode} of this node: {@link Mode#EXCLUSIVE} means it is only available
* for those jobs that exclusively specify this node
* as the assigned node.
*/
Mode getMode();
public enum Mode {
NORMAL("Utilize this slave as much as possible"),
EXCLUSIVE("Leave this machine for tied jobs only");
private final String description;
public String getDescription() {
return description;
}
public String getName() {
return name();
}
Mode(String description) {
this.description = description;
}
}
}
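/**
 * Hedged sketch, not part of the original source: shows how {@link Node.Mode}
 * is typically consulted when deciding whether a node may take a job that is
 * not tied to it; the Queue class later in this commit performs the same check
 * in its isNotExclusive() helper.
 */
class NodeModeExample {
    static boolean acceptsUntiedJobs(Node n) {
        return n.getMode() == Node.Mode.NORMAL;
    }
}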

View File

@ -0,0 +1,53 @@
package hudson.model;
import java.util.TimerTask;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Abstract base class for a periodic work.
*
* @author Kohsuke Kawaguchi
*/
abstract class PeriodicWork extends TimerTask {
/**
* Name of the work.
*/
private final String name;
private Thread thread;
protected final Logger logger = Logger.getLogger(getClass().getName());
protected PeriodicWork(String name) {
this.name = name;
}
/**
* Schedules this periodic work now in a new thread, if one isn't already running.
*/
public final void run() {
try {
if(thread!=null && thread.isAlive()) {
logger.log(Level.INFO, name+" thread is still running. Execution aborted.");
return;
}
thread = new Thread(new Runnable() {
public void run() {
logger.log(Level.INFO, "Started "+name);
long startTime = System.currentTimeMillis();
execute();
logger.log(Level.INFO, "Finished "+name+". "+
(System.currentTimeMillis()-startTime)+" ms");
}
},name+" thread");
thread.start();
} catch (Throwable t) {
logger.log(Level.SEVERE, name+" thread failed with error", t);
}
}
protected abstract void execute();
}
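/**
 * Hedged sketch, not part of the original source: a minimal {@link PeriodicWork}
 * subclass. The work done in execute() is illustrative; since PeriodicWork is a
 * TimerTask, a caller would schedule it on a java.util.Timer, e.g.
 * new java.util.Timer().schedule(new HeartbeatWork(), 0, 60*1000).
 */
class HeartbeatWork extends PeriodicWork {
    HeartbeatWork() {
        super("Heartbeat");
    }
    protected void execute() {
        // the protected `logger` field is inherited from PeriodicWork
        logger.log(Level.INFO, "still alive");
    }
}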

View File

@ -0,0 +1,730 @@
package hudson.model;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.Descriptor.FormException;
import hudson.model.Fingerprint.RangeSet;
import hudson.model.RunMap.Constructor;
import hudson.scm.NullSCM;
import hudson.scm.SCM;
import hudson.scm.SCMS;
import hudson.tasks.BuildStep;
import hudson.tasks.BuildTrigger;
import hudson.tasks.Builder;
import hudson.tasks.Fingerprinter;
import hudson.tasks.Publisher;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.triggers.Trigger;
import hudson.triggers.Triggers;
import org.kohsuke.stapler.Ancestor;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.StringTokenizer;
import java.util.TreeMap;
import java.util.Vector;
/**
* Buildable software project.
*
* @author Kohsuke Kawaguchi
*/
public class Project extends Job<Project,Build> {
/**
* All the builds keyed by their build number.
*/
private transient /*almost final*/ RunMap<Build> builds = new RunMap<Build>();
private SCM scm = new NullSCM();
/**
* List of all {@link Trigger}s for this project.
*/
private List<Trigger> triggers = new Vector<Trigger>();
/**
* List of active {@link Builder}s configured for this project.
*/
private List<Builder> builders = new Vector<Builder>();
/**
* List of active {@link Publisher}s configured for this project.
*/
private List<Publisher> publishers = new Vector<Publisher>();
/**
* {@link Action}s contributed from {@link #triggers}, {@link #builders},
* and {@link #publishers}.
*
* We don't want to persist them separately, and these actions
* come and go as the configuration changes, so they are kept in a transient field.
*/
private transient /*final*/ List<Action> transientActions = new Vector<Action>();
/**
* Identifies {@link JDK} to be used.
* Null if no explicit configuration is required.
*
* <p>
* Can't store {@link JDK} directly because {@link Hudson} and {@link Project}
* are saved independently.
*
* @see Hudson#getJDK(String)
*/
private String jdk;
/**
* The quiet period. Null to delegate to the system default.
*/
private Integer quietPeriod = null;
/**
* If this project is configured to be only built on a certain node,
* this value will be set to that node. Null to indicate the affinity
* with the master node.
*
* @see #canRoam
*/
private String assignedNode;
/**
* True if this project can be built on any node.
*
* <p>
* This somewhat ugly flag combination is so that we can migrate
* existing Hudson installations nicely.
*/
private boolean canRoam;
/**
* True to suspend new builds.
*/
private boolean disabled;
/**
* Creates a new project.
*/
public Project(Hudson parent,String name) {
super(parent,name);
if(!parent.getSlaves().isEmpty()) {
// if a new job is configured with Hudson that already has slave nodes
// make it roamable by default
canRoam = true;
}
}
/**
* If this project is configured to always be built on a specific node,
* return that {@link Node}. Otherwise null.
*/
public Node getAssignedNode() {
if(canRoam)
return null;
if(assignedNode ==null)
return Hudson.getInstance();
return getParent().getSlave(assignedNode);
}
public JDK getJDK() {
return getParent().getJDK(jdk);
}
public int getQuietPeriod() {
return quietPeriod!=null ? quietPeriod : getParent().getQuietPeriod();
}
// ugly name because of EL
public boolean getHasCustomQuietPeriod() {
return quietPeriod!=null;
}
protected void onLoad(Hudson root, String name) throws IOException {
super.onLoad(root, name);
if(triggers==null)
// it didn't exist in < 1.28
triggers = new Vector<Trigger>();
this.builds = new RunMap<Build>();
this.builds.load(this,new Constructor<Build>() {
public Build create(File dir) throws IOException {
return new Build(Project.this,dir);
}
});
for (Trigger t : triggers)
t.start(this);
updateTransientActions();
}
public boolean isBuildable() {
return !isDisabled();
}
public boolean isDisabled() {
return disabled;
}
public SCM getScm() {
return scm;
}
public void setScm(SCM scm) {
this.scm = scm;
}
@Override
public String getIconColor() {
if(isDisabled())
// use grey to indicate that the build is disabled
return "grey";
else
return super.getIconColor();
}
public synchronized Map<Descriptor<Trigger>,Trigger> getTriggers() {
return Descriptor.toMap(triggers);
}
public synchronized Map<Descriptor<Builder>,Builder> getBuilders() {
return Descriptor.toMap(builders);
}
public synchronized Map<Descriptor<Publisher>,Publisher> getPublishers() {
return Descriptor.toMap(publishers);
}
/**
* Adds a new {@link BuildStep} to this {@link Project} and saves the configuration.
*/
private synchronized void addPublisher(Publisher buildStep) throws IOException {
for( int i=0; i<publishers.size(); i++ ) {
if(publishers.get(i).getDescriptor()==buildStep.getDescriptor()) {
// replace
publishers.set(i,buildStep);
save();
return;
}
}
// add
publishers.add(buildStep);
save();
}
/**
* Removes a publisher from this project, if it's active.
*/
private void removePublisher(Descriptor<Publisher> descriptor) throws IOException {
for( int i=0; i<publishers.size(); i++ ) {
if(publishers.get(i).getDescriptor()==descriptor) {
// found it
publishers.remove(i);
save();
return;
}
}
}
public SortedMap<Integer, ? extends Build> _getRuns() {
return builds.getView();
}
public void removeRun(Build run) {
this.builds.remove(run);
}
/**
* Creates a new build of this project for immediate execution.
*/
public Build newBuild() throws IOException {
Build lastBuild = new Build(this);
builds.put(lastBuild);
return lastBuild;
}
public boolean checkout(Build build, Launcher launcher, BuildListener listener, File changelogFile) throws IOException {
if(scm==null)
return true; // no SCM
FilePath workspace = getWorkspace();
workspace.mkdirs();
return scm.checkout(build, launcher, workspace, listener, changelogFile);
}
/**
* Checks if there's any update in SCM, and returns true if any is found.
*
* <p>
* The caller is responsible for coordinating the mutual exclusion between
* a build and polling, as both touch the workspace.
*/
public boolean pollSCMChanges( TaskListener listener ) {
if(scm==null) {
listener.getLogger().println("No SCM");
return false; // no SCM
}
FilePath workspace = getWorkspace();
if(!workspace.exists()) {
// no workspace. build now, or nothing will ever be built
listener.getLogger().println("No workspace is available, so can't check for updates.");
listener.getLogger().println("Scheduling a new build to get a workspace.");
return true;
}
try {
// TODO: do this by using the right slave
return scm.pollChanges(this, new Launcher(listener), workspace, listener );
} catch (IOException e) {
e.printStackTrace(listener.fatalError(e.getMessage()));
return false;
}
}
/**
* Gets the {@link Node} where this project was last built on.
*
* @return
* null if no information is available (for example,
* if no build was done yet.)
*/
public Node getLastBuiltOn() {
// where was it built on?
Build b = getLastBuild();
if(b==null)
return null;
else
return b.getBuiltOn();
}
/**
* Gets the directory where the module is checked out.
*/
public FilePath getWorkspace() {
Node node = getLastBuiltOn();
if(node==null)
node = getParent();
if(node instanceof Slave)
return ((Slave)node).getWorkspaceRoot().child(getName());
else
return new FilePath(new File(getRootDir(),"workspace"));
}
/**
* Returns the root directory of the checked-out module.
*
* @return
* When running remotely, this returns a remote fs directory.
*/
public FilePath getModuleRoot() {
return getScm().getModuleRoot(getWorkspace());
}
/**
* Gets the dependency relationship map between this project (as the source)
* and that project (as the sink.)
*
* @return
* can be empty but never null. Maps build numbers of this project to the build
* numbers of that project.
*/
public SortedMap<Integer,RangeSet> getRelationship(Project that) {
TreeMap<Integer,RangeSet> r = new TreeMap<Integer,RangeSet>(REVERSE_INTEGER_COMPARATOR);
checkAndRecord(that, r, this.getBuilds());
// checkAndRecord(that, r, that.getBuilds());
return r;
}
public List<Project> getDownstreamProjects() {
BuildTrigger buildTrigger = (BuildTrigger) getPublishers().get(BuildTrigger.DESCRIPTOR);
if(buildTrigger==null)
return new ArrayList<Project>();
else
return buildTrigger.getChildProjects();
}
public List<Project> getUpstreamProjects() {
List<Project> r = new ArrayList<Project>();
for( Project p : Hudson.getInstance().getProjects() ) {
synchronized(p) {
for (BuildStep step : p.publishers) {
if (step instanceof BuildTrigger) {
BuildTrigger trigger = (BuildTrigger) step;
if(trigger.getChildProjects().contains(this))
r.add(p);
}
}
}
}
return r;
}
/**
* Helper method for {@link #getRelationship(Project)}.
*
* For each given build, find the build number range of the given project and put that into the map.
*/
private void checkAndRecord(Project that, TreeMap<Integer, RangeSet> r, Collection<? extends Build> builds) {
for (Build build : builds) {
RangeSet rs = build.getDownstreamRelationship(that);
if(rs==null || rs.isEmpty())
continue;
int n = build.getNumber();
RangeSet value = r.get(n);
if(value==null)
r.put(n,rs);
else
value.add(rs);
}
}
/**
* Schedules a build of this project.
*/
public void scheduleBuild() {
if(!disabled)
getParent().getQueue().add(this);
}
/**
* Returns true if the build is in the queue.
*/
@Override
public boolean isInQueue() {
return getParent().getQueue().contains(this);
}
/**
* Schedules the SCM polling. If polling is already in progress
* or a build is in progress, polling will take place after that.
* Otherwise the polling will be started immediately on a separate thread.
*
* <p>
* In any case this method returns immediately.
*/
public void scheduleSCMPolling() {
// TODO
}
/**
* Returns true if the fingerprint record is configured in this project.
*/
public boolean isFingerprintConfigured() {
synchronized(publishers) {
for (Publisher p : publishers) {
if(p instanceof Fingerprinter)
return true;
}
}
return false;
}
//
//
// actions
//
//
/**
* Schedules a new build command.
*/
public void doBuild( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
scheduleBuild();
rsp.forwardToPreviousPage(req);
}
/**
* Cancels a scheduled build.
*/
public void doCancelQueue( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
getParent().getQueue().cancel(this);
rsp.forwardToPreviousPage(req);
}
/**
* Accepts submission from the configuration page.
*/
public void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
Set<Project> upstream = Collections.EMPTY_SET;
synchronized(this) {
try {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
int scmidx = Integer.parseInt(req.getParameter("scm"));
scm = SCMS.SCMS.get(scmidx).newInstance(req);
disabled = req.getParameter("disable")!=null;
jdk = req.getParameter("jdk");
if(req.getParameter("hasCustomQuietPeriod")!=null) {
quietPeriod = Integer.parseInt(req.getParameter("quiet_period"));
} else {
quietPeriod = null;
}
if(req.getParameter("hasSlaveAffinity")!=null) {
canRoam = false;
assignedNode = req.getParameter("slave");
if(assignedNode !=null) {
if(Hudson.getInstance().getSlave(assignedNode)==null) {
assignedNode = null; // no such slave
}
}
} else {
canRoam = true;
assignedNode = null;
}
buildDescribable(req, BuildStep.BUILDERS, builders, "builder");
buildDescribable(req, BuildStep.PUBLISHERS, publishers, "publisher");
for (Trigger t : triggers)
t.stop();
buildDescribable(req, Triggers.TRIGGERS, triggers, "trigger");
for (Trigger t : triggers)
t.start(this);
updateTransientActions();
super.doConfigSubmit(req,rsp);
} catch (FormException e) {
sendError(e,req,rsp);
}
}
if(req.getParameter("pseudoUpstreamTrigger")!=null) {
upstream = new HashSet<Project>(Project.fromNameList(req.getParameter("upstreamProjects")));
}
// this needs to be done after we release the lock on this,
// otherwise we could deadlock
for (Project p : Hudson.getInstance().getProjects()) {
boolean isUpstream = upstream.contains(p);
synchronized(p) {
List<Project> newChildProjects = p.getDownstreamProjects();
if(isUpstream) {
if(!newChildProjects.contains(this))
newChildProjects.add(this);
} else {
newChildProjects.remove(this);
}
if(newChildProjects.isEmpty()) {
p.removePublisher(BuildTrigger.DESCRIPTOR);
} else {
p.addPublisher(new BuildTrigger(newChildProjects));
}
}
}
}
private void updateTransientActions() {
if(transientActions==null)
transientActions = new Vector<Action>(); // happens when loaded from disk
synchronized(transientActions) {
transientActions.clear();
for (BuildStep step : builders) {
Action a = step.getProjectAction(this);
if(a!=null)
transientActions.add(a);
}
for (BuildStep step : publishers) {
Action a = step.getProjectAction(this);
if(a!=null)
transientActions.add(a);
}
for (Trigger trigger : triggers) {
Action a = trigger.getProjectAction();
if(a!=null)
transientActions.add(a);
}
}
}
public synchronized List<Action> getActions() {
// add all the transient actions, too
List<Action> actions = new Vector<Action>(super.getActions());
actions.addAll(transientActions);
return actions;
}
public List<ProminentProjectAction> getProminentActions() {
List<Action> a = getActions();
List<ProminentProjectAction> pa = new Vector<ProminentProjectAction>();
for (Action action : a) {
if(action instanceof ProminentProjectAction)
pa.add((ProminentProjectAction) action);
}
return pa;
}
private <T extends Describable<T>> void buildDescribable(StaplerRequest req, List<Descriptor<T>> descriptors, List<T> result, String prefix)
throws FormException {
result.clear();
for( int i=0; i< descriptors.size(); i++ ) {
if(req.getParameter(prefix +i)!=null) {
T instance = descriptors.get(i).newInstance(req);
result.add(instance);
}
}
}
/**
* Serves the workspace files.
*/
public void doWs( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
File dir = getWorkspace().getLocal();
if(!dir.exists()) {
// if there's no workspace, report a nice error message
rsp.forward(this,"noWorkspace",req);
} else {
serveFile(req, rsp, dir, "folder.gif", true);
}
}
/**
* Display the test result trend.
*/
public void doTestResultTrend( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
Build b = getLastSuccessfulBuild();
if(b!=null) {
AbstractTestResultAction a = b.getTestResultAction();
if(a!=null) {
a.doGraph(req,rsp);
return;
}
}
// error
rsp.setStatus(HttpServletResponse.SC_NOT_FOUND);
}
/**
* Changes the test result report display mode.
*/
public void doFlipTestResultTrend( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
boolean failureOnly = false;
// check the current preference value
Cookie[] cookies = req.getCookies();
if(cookies!=null) {
for (Cookie cookie : cookies) {
if(cookie.getName().equals(FAILURE_ONLY_COOKIE))
failureOnly = Boolean.parseBoolean(cookie.getValue());
}
}
// flip!
failureOnly = !failureOnly;
// set the updated value
Cookie cookie = new Cookie(FAILURE_ONLY_COOKIE,String.valueOf(failureOnly));
List anc = req.getAncestors();
Ancestor a = (Ancestor) anc.get(anc.size()-1); // last
cookie.setPath(a.getUrl()); // just for this chart
cookie.setMaxAge(Integer.MAX_VALUE);
rsp.addCookie(cookie);
// back to the project page
rsp.sendRedirect(".");
}
/**
* @deprecated
* left for legacy config file compatibility
*/
private transient String slave;
private static final String FAILURE_ONLY_COOKIE = "TestResultAction_failureOnly";
/**
* Converts a list of projects into comma-separated names.
*/
public static String toNameList(Collection<? extends Project> projects) {
StringBuilder buf = new StringBuilder();
for (Project project : projects) {
if(buf.length()>0)
buf.append(", ");
buf.append(project.getName());
}
return buf.toString();
}
/**
* Does the opposite of {@link #toNameList(Collection)}.
*/
public static List<Project> fromNameList(String list) {
Hudson hudson = Hudson.getInstance();
List<Project> r = new ArrayList<Project>();
StringTokenizer tokens = new StringTokenizer(list,",");
while(tokens.hasMoreTokens()) {
String projectName = tokens.nextToken().trim();
Job job = hudson.getJob(projectName);
if(!(job instanceof Project)) {
continue; // ignore this token
}
r.add((Project) job);
}
return r;
}
private static final Comparator<Integer> REVERSE_INTEGER_COMPARATOR = new Comparator<Integer>() {
public int compare(Integer o1, Integer o2) {
return o2-o1;
}
};
public JobDescriptor<Project,Build> getDescriptor() {
return DESCRIPTOR;
}
public static final JobDescriptor<Project,Build> DESCRIPTOR = new JobDescriptor<Project,Build>(Project.class) {
public String getDisplayName() {
return "Building a software project";
}
public Project newInstance(String name) {
return new Project(Hudson.getInstance(),name);
}
};
}
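/**
 * Hedged usage sketch, not part of the original source: round-trips the
 * comma-separated project list used by the upstream-project configuration and
 * schedules the resolved projects. The project names are illustrative, and
 * fromNameList() only works inside a running Hudson instance because it
 * consults Hudson.getInstance().
 */
class ProjectNameListExample {
    static void demo() {
        // names that don't resolve to a Project are silently skipped
        List<Project> ps = Project.fromNameList("core, site, no-such-project");
        // back to the canonical comma-separated form
        System.out.println(Project.toNameList(ps));
        // scheduleBuild() is a no-op for disabled projects
        for (Project p : ps)
            p.scheduleBuild();
    }
}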

View File

@ -0,0 +1,11 @@
package hudson.model;
/**
* Marker interface for {@link Action}s that should be displayed
* at the top of the project page.
*
* @author Kohsuke Kawaguchi
*/
public interface ProminentProjectAction extends Action {
// TODO: do the rendering of the part from the action page
}

View File

@ -0,0 +1,480 @@
package hudson.model;
import hudson.model.Node.Mode;
import hudson.util.OneShotEvent;
import java.util.Calendar;
import java.util.Comparator;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeSet;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.io.PrintWriter;
import java.io.FileOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.BufferedReader;
import java.io.FileInputStream;
import java.io.InputStreamReader;
/**
* Build queue.
*
* <p>
* This class implements the core scheduling logic.
*
* @author Kohsuke Kawaguchi
*/
public class Queue {
private static final Comparator<Item> itemComparator = new Comparator<Item>() {
public int compare(Item lhs, Item rhs) {
int r = lhs.timestamp.getTime().compareTo(rhs.timestamp.getTime());
if(r!=0) return r;
return lhs.id-rhs.id;
}
};
/**
* Items in the queue ordered by {@link Item#timestamp}.
*
* <p>
* This consists of {@link Item}s that cannot be run yet
* because their time has not yet come.
*/
private final Set<Item> queue = new TreeSet<Item>(itemComparator);
/**
* {@link Project}s that can be built immediately
* but are blocked because another build is in progress.
*/
private final Set<Project> blockedProjects = new HashSet<Project>();
/**
* {@link Project}s that can be built immediately
* that are waiting for an available {@link Executor}.
*/
private final List<Project> buildables = new LinkedList<Project>();
/**
* Data structure created for each idle {@link Executor}.
* This is an offer from the queue to an executor.
*
* <p>
* It eventually receives a {@link #project} to build.
*/
private static class JobOffer {
final Executor executor;
/**
* Used to wake up an executor, when it has an offered
* {@link Project} to build.
*/
final OneShotEvent event = new OneShotEvent();
/**
* The project that this {@link Executor} is going to build.
* (Or null, in which case event is used to trigger a queue maintenance.)
*/
Project project;
public JobOffer(Executor executor) {
this.executor = executor;
}
public void set(Project p) {
this.project = p;
event.signal();
}
public boolean isAvailable() {
return project==null && !executor.getOwner().isTemporarilyOffline();
}
public Node getNode() {
return executor.getOwner().getNode();
}
public boolean isNotExclusive() {
return getNode().getMode()== Mode.NORMAL;
}
}
private final Map<Executor,JobOffer> parked = new HashMap<Executor,JobOffer>();
/**
* Loads the queue contents that were {@link #save() saved}.
*/
public synchronized void load() {
// read back the queue contents persisted by save()
try {
File queueFile = getQueueFile();
if(!queueFile.exists())
return;
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(queueFile)));
String line;
while((line=in.readLine())!=null) {
Job j = Hudson.getInstance().getJob(line);
if(j instanceof Project)
((Project)j).scheduleBuild();
}
in.close();
// discard the queue file now that we are done
queueFile.delete();
} catch(IOException e) {
LOGGER.log(Level.WARNING, "Failed to load the queue file "+getQueueFile(),e);
}
}
/**
* Persists the queue contents to the disk.
*/
public synchronized void save() {
// write out the contents of the queue
try {
PrintWriter w = new PrintWriter(new FileOutputStream(
getQueueFile()));
for (Item i : getItems())
w.println(i.getProject().getName());
w.close();
} catch(IOException e) {
LOGGER.log(Level.WARNING, "Failed to write out the queue file "+getQueueFile(),e);
}
}
private File getQueueFile() {
return new File(Hudson.getInstance().getRootDir(),"queue.txt");
}
/**
* Schedule a new build for this project.
*/
public synchronized void add( Project p ) {
if(contains(p))
return; // no double queueing
// put the item in the queue
Calendar due = new GregorianCalendar();
due.add(Calendar.SECOND, p.getQuietPeriod());
queue.add(new Item(due,p));
scheduleMaintenance(); // let an executor know that a new item is in the queue.
}
public synchronized void cancel( Project p ) {
for (Iterator itr = queue.iterator(); itr.hasNext();) {
Item item = (Item) itr.next();
if(item.project==p) {
itr.remove();
return;
}
}
blockedProjects.remove(p);
buildables.remove(p);
}
public synchronized boolean isEmpty() {
return queue.isEmpty() && blockedProjects.isEmpty() && buildables.isEmpty();
}
private synchronized Item peek() {
return queue.iterator().next();
}
/**
* Gets a snapshot of items in the queue.
*/
public synchronized Item[] getItems() {
Item[] r = new Item[queue.size()+blockedProjects.size()+buildables.size()];
queue.toArray(r);
int idx=queue.size();
Calendar now = new GregorianCalendar();
for (Project p : blockedProjects) {
r[idx++] = new Item(now, p);
}
for (Project p : buildables) {
r[idx++] = new Item(now, p);
}
return r;
}
/**
* Returns true if this queue contains the given project.
*/
public synchronized boolean contains(Project p) {
// if this project is already scheduled,
// don't do anything
if(blockedProjects.contains(p) || buildables.contains(p))
return true;
for (Item item : queue) {
if (item.project == p)
return true;
}
return false;
}
/**
* Called by the executor to fetch something to build next.
*
* This method blocks until a project becomes buildable.
*/
public Project pop() throws InterruptedException {
final Executor exec = Executor.currentExecutor();
boolean successfulReturn = false;
try {
while(true) {
final JobOffer offer = new JobOffer(exec);
long sleep = -1;
synchronized(this) {
// consider myself parked
assert !parked.containsKey(exec);
parked.put(exec,offer);
// reuse the executor thread to do a queue maintenance.
// at the end of this we get all the buildable jobs
// in the buildables field.
maintain();
// allocate buildable jobs to executors
Iterator<Project> itr = buildables.iterator();
while(itr.hasNext()) {
Project p = itr.next();
JobOffer runner = choose(p);
if(runner==null)
// if we couldn't find the executor that fits,
// just leave it in the buildables list and
// check if we can execute other projects
continue;
// found a matching executor. use it.
runner.set(p);
itr.remove();
}
// we went over all the buildable projects and woke up
// all the executors that got work to do. now, go to sleep
// until this thread is awakened. If this executor assigned a job to
// itself above, the block method will return immediately.
if(!queue.isEmpty()) {
// wait until the first item in the queue is due
sleep = peek().timestamp.getTimeInMillis()-new GregorianCalendar().getTimeInMillis();
if(sleep <100) sleep =100; // avoid wait(0)
}
}
// this needs to be done outside synchronized block,
// so that executors can maintain a queue while others are sleeping
if(sleep ==-1)
offer.event.block();
else
offer.event.block(sleep);
synchronized(this) {
// am I woken up because I have a project to build?
if(offer.project!=null) {
// if so, just build it
successfulReturn = true;
return offer.project;
}
// otherwise run a queue maintenance
}
}
} finally {
synchronized(this) {
// remove myself from the parked list
JobOffer offer = parked.get(exec);
if(offer!=null) {
if(!successfulReturn && offer.project!=null) {
// we are already assigned a project,
// ask for someone else to build it.
// note that while this thread is waiting for CPU
// someone else can schedule this build again.
if(!contains(offer.project))
buildables.add(offer.project);
}
// since this executor might have been chosen for
// maintenance, schedule another one. Worst case
// we'll just run a pointless maintenance, and that's
// fine.
scheduleMaintenance();
}
}
}
}
/**
* Chooses the executor to carry out the build for the given project.
*
* @return
* null if no {@link Executor} can run it.
*/
private JobOffer choose(Project p) {
if(Hudson.getInstance().isQuietingDown()) {
// if we are quieting down, don't run anything so that
// all executors will be free.
return null;
}
Node n = p.getAssignedNode();
if(n!=null) {
// if a project has an assigned node, it can only be built on it
for (JobOffer offer : parked.values()) {
if(offer.isAvailable() && offer.getNode()==n)
return offer;
}
return null;
}
// otherwise let's see if the last node this project was built on is available;
// it has an up-to-date workspace, so that's usually preferable.
// (but we can't use an exclusive node)
n = p.getLastBuiltOn();
if(n!=null && n.getMode()==Mode.NORMAL) {
for (JobOffer offer : parked.values()) {
if(offer.isAvailable() && offer.getNode()==n)
return offer;
}
}
// duration of a build on a slave tends not to have an impact on
// the master/slave communication, so that means we should favor
// running long jobs on slaves.
Build succ = p.getLastSuccessfulBuild();
if(succ!=null && succ.getDuration()>15*60*1000) {
// consider a long job to be > 15 mins
for (JobOffer offer : parked.values()) {
if(offer.isAvailable() && offer.getNode() instanceof Slave && offer.isNotExclusive())
return offer;
}
}
// lastly, just look for any idle executor
for (JobOffer offer : parked.values()) {
if(offer.isAvailable() && offer.isNotExclusive())
return offer;
}
// nothing available
return null;
}
/**
* Checks the queue and runs anything that can be run.
*
* <p>
* When conditions are changed, this method should be invoked.
*
* This wakes up one {@link Executor} so that it will maintain a queue.
*/
public synchronized void scheduleMaintenance() {
// this code assumes that after this method is called
// no more executors will be offered a job except by
// the pop() code.
for (Entry<Executor, JobOffer> av : parked.entrySet()) {
if(av.getValue().project==null) {
av.getValue().event.signal();
return;
}
}
}
/**
* Queue maintenance.
*
* Move projects between {@link #queue}, {@link #blockedProjects}, and {@link #buildables}
* appropriately.
*/
private synchronized void maintain() {
Iterator<Project> itr = blockedProjects.iterator();
while(itr.hasNext()) {
Project p = itr.next();
Build lastBuild = p.getLastBuild();
if (lastBuild == null || !lastBuild.isBuilding()) {
// ready to be executed
itr.remove();
buildables.add(p);
}
}
while(!queue.isEmpty()) {
Item top = peek();
if(!top.timestamp.before(new GregorianCalendar()))
return; // finished moving all ready items from queue
Build lastBuild = top.project.getLastBuild();
if(lastBuild==null || !lastBuild.isBuilding()) {
// ready to be executed immediately
queue.remove(top);
buildables.add(top.project);
} else {
// this can't be built now because another build is in progress
// set this project aside.
queue.remove(top);
blockedProjects.add(top.project);
}
}
}
/**
* Item in a queue.
*/
public class Item {
/**
* This item can be run after this time.
*/
final Calendar timestamp;
/**
* Project to be built.
*/
final Project project;
/**
* Unique number of this {@link Item}.
* Used to differentiate {@link Item}s with the same due date.
*/
final int id;
public Item(Calendar timestamp, Project project) {
this.timestamp = timestamp;
this.project = project;
synchronized(Queue.this) {
this.id = iota++;
}
}
public Calendar getTimestamp() {
return timestamp;
}
public Project getProject() {
return project;
}
public int getId() {
return id;
}
}
/**
* Unique number generator
*/
private int iota=0;
private static final Logger LOGGER = Logger.getLogger(Queue.class.getName());
}
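/**
 * Hedged sketch, not part of the original source: illustrates the hand-off
 * that a consumer of {@link Queue#pop()} performs. The real consumer is
 * presumably the Executor class (referenced above but not shown in this hunk);
 * this loop only demonstrates how add() and pop() pair up.
 */
class QueueUsageExample {
    static void executorLoop(Queue q) throws InterruptedException, IOException {
        while (true) {
            // blocks until a buildable Project has been assigned to this thread
            Project p = q.pop();
            // create the build record; the executor would then drive it through Run.run(Runner)
            Build b = p.newBuild();
            System.out.println("would now run " + b);
        }
    }
}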

View File

@ -0,0 +1,71 @@
package hudson.model;
import hudson.FeedAdapter;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.Iterator;
/**
* RSS related code.
*
* @author Kohsuke Kawaguchi
*/
final class RSS {
/**
* Parses trackback ping.
*/
public static void doTrackback( Object it, StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
req.setCharacterEncoding("UTF-8");
String title = req.getParameter("title");
String url = req.getParameter("url");
String excerpt = req.getParameter("excerpt");
String blog_name = req.getParameter("blog_name");
rsp.setStatus(HttpServletResponse.SC_OK);
rsp.setContentType("application/xml; charset=UTF-8");
PrintWriter pw = rsp.getWriter();
pw.println("<response>");
pw.println("<error>"+(url!=null?0:1)+"</error>");
if(url==null) {
pw.println("<message>url must be specified</message>");
}
pw.println("</response>");
pw.close();
}
/**
* Sends the RSS feed to the client.
*
* @param title
* Title of the feed.
* @param url
* URL of the model object that owns this feed
* @param entries
* Entries to be listed in the RSS feed.
* @param adapter
* Controls how to render entries to RSS.
*/
public static <E> void forwardToRss(String title, String url, Collection<? extends E> entries, FeedAdapter<E> adapter, StaplerRequest req, HttpServletResponse rsp) throws IOException, ServletException {
req.setAttribute("adapter",adapter);
req.setAttribute("title",title);
req.setAttribute("url",url);
req.setAttribute("entries",entries);
String flavor = req.getParameter("flavor");
if(flavor==null) flavor="atom";
req.getView(Hudson.getInstance(),"/hudson/"+flavor+".jelly").forward(req,rsp);
}
}

View File

@ -0,0 +1,73 @@
package hudson.model;
import com.thoughtworks.xstream.converters.Converter;
import com.thoughtworks.xstream.converters.basic.AbstractBasicConverter;
/**
* The build outcome.
*
* @author Kohsuke Kawaguchi
*/
public final class Result {
/**
* The build had no fatal errors and no other errors.
*/
public static final Result SUCCESS = new Result("SUCCESS",0);
/**
* The build didn't have any fatal errors but some errors.
*/
public static final Result UNSTABLE = new Result("UNSTABLE",1);
/**
* The build had a fatal error.
*/
public static final Result FAILURE = new Result("FAILURE",2);
/**
* The build was manually aborted.
*/
public static final Result ABORTED = new Result("ABORTED",3);
private final String name;
/**
* Bigger numbers are worse.
*/
private final int ordinal;
private Result(String name, int ordinal) {
this.name = name;
this.ordinal = ordinal;
}
/**
* Combines two {@link Result}s and returns the worse one.
*/
public Result combine(Result that) {
if(this.ordinal < that.ordinal)
return that;
else
return this;
}
public boolean isWorseThan(Result that) {
return this.ordinal > that.ordinal;
}
public String toString() {
return name;
}
private static final Result[] all = new Result[] {SUCCESS,UNSTABLE,FAILURE,ABORTED};
public static final Converter conv = new AbstractBasicConverter () {
public boolean canConvert(Class clazz) {
return clazz==Result.class;
}
protected Object fromString(String s) {
for (Result r : all)
if (s.equals(r.name))
return r;
return FAILURE;
}
};
}
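/**
 * Hedged sketch, not part of the original source: demonstrates the ordering
 * that combine() and isWorseThan() implement. Bigger ordinals are worse, so
 * combining always keeps the worse of the two results.
 */
class ResultExample {
    static void demo() {
        Result r = Result.SUCCESS;
        r = r.combine(Result.UNSTABLE);  // keeps the worse result -> UNSTABLE
        r = r.combine(Result.SUCCESS);   // still UNSTABLE
        // prints "UNSTABLE is worse than SUCCESS: true"
        System.out.println(r + " is worse than SUCCESS: " + r.isWorseThan(Result.SUCCESS));
    }
}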

View File

@ -0,0 +1,800 @@
package hudson.model;
import static hudson.Util.combine;
import com.thoughtworks.xstream.XStream;
import hudson.CloseProofOutputStream;
import hudson.ExtensionPoint;
import hudson.Util;
import hudson.XmlFile;
import hudson.FeedAdapter;
import hudson.tasks.BuildStep;
import hudson.tasks.LogRotator;
import hudson.tasks.test.AbstractTestResultAction;
import hudson.util.CharSpool;
import hudson.util.IOException2;
import hudson.util.XStream2;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Comparator;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Logger;
/**
* A particular execution of {@link Job}.
*
* <p>
* Custom {@link Run} type is always used in conjunction with
* a custom {@link Job} type, so there's no separate registration
* mechanism for custom {@link Run} types.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Run <JobT extends Job<JobT,RunT>,RunT extends Run<JobT,RunT>>
extends DirectoryHolder implements ExtensionPoint {
protected transient final JobT project;
/**
* Build number.
*
* <p>
* In earlier versions &lt; 1.24, this number is neither unique nor continuous,
* but going forward, it will, and this really replaces the build id.
*/
public /*final*/ int number;
/**
* Previous build. Can be null.
* These two fields are maintained and updated by {@link RunMap}.
*/
protected volatile transient RunT previousBuild;
/**
* Next build. Can be null.
*/
protected volatile transient RunT nextBuild;
/**
* When the build is scheduled.
*/
protected transient final Calendar timestamp;
/**
* The build result.
* This value may change while the state is in {@link State#BUILDING}.
*/
protected volatile Result result;
/**
* Human-readable description. Can be null.
*/
protected volatile String description;
/**
* The current build state.
*/
protected volatile transient State state;
private static enum State {
NOT_STARTED,
BUILDING,
COMPLETED
}
/**
* Number of milliseconds it took to run this build.
*/
protected long duration;
/**
* True to keep this build's log file, protecting it from {@link LogRotator}.
*/
private boolean keepLog;
protected static final SimpleDateFormat ID_FORMATTER = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss");
/**
* Creates a new {@link Run}.
*/
protected Run(JobT job) throws IOException {
this(job, new GregorianCalendar());
this.number = project.assignBuildNumber();
}
/**
* Constructor for creating a {@link Run} object in
* an arbitrary state.
*/
protected Run(JobT job, Calendar timestamp) {
this.project = job;
this.timestamp = timestamp;
this.state = State.NOT_STARTED;
}
/**
* Loads a run from a log file.
*/
protected Run(JobT project, File buildDir) throws IOException {
this(project, new GregorianCalendar());
try {
this.timestamp.setTime(ID_FORMATTER.parse(buildDir.getName()));
} catch (ParseException e) {
throw new IOException2("Invalid directory name "+buildDir,e);
} catch (NumberFormatException e) {
throw new IOException2("Invalid directory name "+buildDir,e);
}
this.state = State.COMPLETED;
this.result = Result.FAILURE; // defensive measure. value should be overwritten by unmarshal, but just in case the saved data is inconsistent
getDataFile().unmarshal(this); // load the rest of the data
}
/**
* Returns the build result.
*
* <p>
* When a build is {@link #isBuilding() in progress}, this method
* may return null or a temporary intermediate result.
*/
public final Result getResult() {
return result;
}
public void setResult(Result r) {
// state can change only when we are building
assert state==State.BUILDING;
StackTraceElement caller = findCaller(Thread.currentThread().getStackTrace(),"setResult");
// result can only get worse
if(result==null) {
result = r;
LOGGER.info(toString()+" : result is set to "+r+" by "+caller);
} else {
if(r.isWorseThan(result)) {
LOGGER.info(toString()+" : result is set to "+r+" by "+caller);
result = r;
}
}
}
private StackTraceElement findCaller(StackTraceElement[] stackTrace, String callee) {
for(int i=0; i<stackTrace.length-1; i++) {
StackTraceElement e = stackTrace[i];
if(e.getMethodName().equals(callee))
return stackTrace[i+1];
}
return null; // not found
}
/**
* Returns true if the build is not completed yet.
*/
public boolean isBuilding() {
return state!=State.COMPLETED;
}
/**
* Gets the {@link Executor} building this job, if it's being built.
* Otherwise null.
*/
public Executor getExecutor() {
for( Computer c : Hudson.getInstance().getComputers() ) {
for (Executor e : c.getExecutors()) {
if(e.getCurrentBuild()==(Object)this)
return e;
}
}
return null;
}
/**
* Returns true if this log file should be kept and not deleted.
*
* This is used as a signal to the {@link LogRotator}.
*/
public boolean isKeepLog() {
return keepLog;
}
/**
* The project this build is for.
*/
public JobT getParent() {
return project;
}
/**
* When the build is scheduled.
*/
public Calendar getTimestamp() {
return timestamp;
}
public String getDescription() {
return description;
}
/**
* Gets the string that says how long ago this build was scheduled.
*
* @return
* string like "3 minutes" "1 day" etc.
*/
public String getTimestampString() {
long duration = new GregorianCalendar().getTimeInMillis()-timestamp.getTimeInMillis();
return Util.getTimeSpanString(duration);
}
/**
* Returns the timestamp formatted in xs:dateTime.
*/
public String getTimestampString2() {
return Util.XS_DATETIME_FORMATTER.format(timestamp.getTime());
}
/**
* Gets the string that says how long the build took to run.
*/
public String getDurationString() {
return Util.getTimeSpanString(duration);
}
/**
* Gets the number of milliseconds it took to build.
*/
public long getDuration() {
return duration;
}
/**
* Gets the icon color for display.
*/
public String getIconColor() {
if(!isBuilding()) {
// already built
if(result==Result.SUCCESS)
return "blue";
if(result== Result.UNSTABLE)
return "yellow";
else
return "red";
}
// a new build is in progress
String baseColor;
if(previousBuild==null)
baseColor = "grey";
else
baseColor = previousBuild.getIconColor();
return baseColor +"_anime";
}
/**
* Returns true if the build is still queued and hasn't started yet.
*/
public boolean hasntStartedYet() {
return state ==State.NOT_STARTED;
}
public String toString() {
return project.getName()+" #"+number;
}
public String getDisplayName() {
return "#"+number;
}
public int getNumber() {
return number;
}
public RunT getPreviousBuild() {
return previousBuild;
}
/**
* Returns the last build that didn't fail before this build.
*/
public RunT getPreviousNotFailedBuild() {
RunT r=previousBuild;
while( r!=null && r.getResult()==Result.FAILURE )
r=r.previousBuild;
return r;
}
/**
* Returns the last failed build before this build.
*/
public RunT getPreviousFailedBuild() {
RunT r=previousBuild;
while( r!=null && r.getResult()!=Result.FAILURE )
r=r.previousBuild;
return r;
}
public RunT getNextBuild() {
return nextBuild;
}
// I really messed this up. I'm hoping to fix this some time
// it shouldn't have trailing '/', and instead it should have leading '/'
public String getUrl() {
return project.getUrl()+getNumber()+'/';
}
/**
* Unique ID of this build.
*/
public String getId() {
return ID_FORMATTER.format(timestamp.getTime());
}
public File getRootDir() {
File f = new File(project.getBuildDir(),getId());
f.mkdirs();
return f;
}
/**
* Gets the directory where the artifacts are archived.
*/
public File getArtifactsDir() {
return new File(getRootDir(),"archive");
}
/**
* Gets the first {@value #CUTOFF} artifacts (relative to {@link #getArtifactsDir()}).
*/
public List<Artifact> getArtifacts() {
List<Artifact> r = new ArrayList<Artifact>();
addArtifacts(getArtifactsDir(),"",r);
return r;
}
/**
* Returns true if this run has any artifacts.
*
* <p>
* The strange method name is so that we can access it from EL.
*/
public boolean getHasArtifacts() {
return !getArtifacts().isEmpty();
}
private void addArtifacts( File dir, String path, List<Artifact> r ) {
String[] children = dir.list();
if(children==null) return;
for (String child : children) {
if(r.size()>CUTOFF)
return;
File sub = new File(dir, child);
if (sub.isDirectory()) {
addArtifacts(sub, path + child + '/', r);
} else {
r.add(new Artifact(path + child));
}
}
}
private static final int CUTOFF = 17; // 0, 1,... 16, and then "too many"
/**
* A build artifact.
*/
public class Artifact {
/**
* Relative path name from {@link Run#getArtifactsDir()}
*/
private final String relativePath;
private Artifact(String relativePath) {
this.relativePath = relativePath;
}
/**
* Gets the artifact file.
*/
public File getFile() {
return new File(getArtifactsDir(),relativePath);
}
/**
* Returns just the file name portion, without the path.
*/
public String getFileName() {
return getFile().getName();
}
public String toString() {
return relativePath;
}
}
/**
* Returns the log file.
*/
public File getLogFile() {
return new File(getRootDir(),"log");
}
/**
* Deletes this build and its entire log
*
* @throws IOException
* if we fail to delete.
*/
public synchronized void delete() throws IOException {
File rootDir = getRootDir();
File tmp = new File(rootDir.getParentFile(),'.'+rootDir.getName());
if(!rootDir.renameTo(tmp))
throw new IOException(rootDir+" is in use");
Util.deleteRecursive(tmp);
getParent().removeRun((RunT)this);
}
protected static interface Runner {
Result run( BuildListener listener ) throws Exception;
void post( BuildListener listener );
}
protected final void run(Runner job) {
if(result!=null)
return; // already built.
onStartBuilding();
try {
// to set the state to COMPLETE in the end, even if the thread dies abnormally.
// otherwise the queue state becomes inconsistent
long start = System.currentTimeMillis();
BuildListener listener=null;
try {
try {
final PrintStream log = new PrintStream(new FileOutputStream(getLogFile()));
listener = new BuildListener() {
final PrintWriter pw = new PrintWriter(new CloseProofOutputStream(log),true);
public void started() {}
public PrintStream getLogger() {
return log;
}
public PrintWriter error(String msg) {
pw.println("ERROR: "+msg);
return pw;
}
public PrintWriter fatalError(String msg) {
return error(msg);
}
public void finished(Result result) {
pw.close();
log.close();
}
};
listener.started();
result = job.run(listener);
LOGGER.info(toString()+" main build action completed: "+result);
} catch (ThreadDeath t) {
throw t;
} catch( Throwable e ) {
handleFatalBuildProblem(listener,e);
result = Result.FAILURE;
}
// even if the main build fails fatally, try to run post-build processing
job.post(listener);
} catch (ThreadDeath t) {
throw t;
} catch( Throwable e ) {
handleFatalBuildProblem(listener,e);
result = Result.FAILURE;
}
long end = System.currentTimeMillis();
duration = end-start;
if(listener!=null)
listener.finished(result);
try {
save();
} catch (IOException e) {
e.printStackTrace();
}
try {
LogRotator lr = getParent().getLogRotator();
if(lr!=null)
lr.perform(getParent());
} catch (IOException e) {
e.printStackTrace();
}
} finally {
onEndBuilding();
}
}
/**
* Handles a fatal build problem (exception) that occurred during the build.
*/
private void handleFatalBuildProblem(BuildListener listener, Throwable e) {
if(listener!=null) {
if(e instanceof IOException)
Util.displayIOException((IOException)e,listener);
Writer w = listener.fatalError(e.getMessage());
if(w!=null) {
try {
e.printStackTrace(new PrintWriter(w));
w.close();
} catch (IOException e1) {
// ignore
}
}
}
}
/**
* Called when a job started building.
*/
protected void onStartBuilding() {
state = State.BUILDING;
}
/**
* Called when a job finished building normally or abnormally.
*/
protected void onEndBuilding() {
state = State.COMPLETED;
if(result==null) {
// shouldn't happen, but be defensive until we figure out why
result = Result.FAILURE;
LOGGER.warning(toString()+": No build result is set, so marking as failure. This shouldn't happen");
}
}
/**
* Save the settings to a file.
*/
public synchronized void save() throws IOException {
getDataFile().write(this);
}
private XmlFile getDataFile() {
return new XmlFile(XSTREAM,new File(getRootDir(),"build.xml"));
}
/**
* Gets the log of the build as a string.
*
* I know, this isn't terribly efficient!
*/
public String getLog() throws IOException {
return Util.loadFile(getLogFile());
}
public void doBuildStatus( StaplerRequest req, StaplerResponse rsp ) throws IOException {
// see Hudson.doNocacheImages. this is a workaround for a bug in Firefox
rsp.sendRedirect2(req.getContextPath()+"/nocacheImages/48x48/"+getBuildStatusUrl());
}
public String getBuildStatusUrl() {
return getIconColor()+".gif";
}
public static class Summary {
/**
* Is this build worse or better, compared to the previous build?
*/
public boolean isWorse;
public String message;
public Summary(boolean worse, String message) {
this.isWorse = worse;
this.message = message;
}
}
/**
* Gets an object that computes the single line summary of this build.
*/
public Summary getBuildStatusSummary() {
Run prev = getPreviousBuild();
if(getResult()==Result.SUCCESS) {
if(prev==null || prev.getResult()== Result.SUCCESS)
return new Summary(false,"stable");
else
return new Summary(false,"back to normal");
}
if(getResult()==Result.FAILURE) {
RunT since = getPreviousNotFailedBuild();
if(since==null)
return new Summary(false,"broken for a long time");
if(since==prev)
return new Summary(true,"broken since this build");
return new Summary(false,"broekn since "+since.getDisplayName());
}
if(getResult()==Result.ABORTED)
return new Summary(false,"aborted");
if(getResult()==Result.UNSTABLE) {
if(((Run)this) instanceof Build) {
AbstractTestResultAction trN = ((Build)(Run)this).getTestResultAction();
AbstractTestResultAction trP = prev==null ? null : ((Build) prev).getTestResultAction();
if(trP==null) {
if(trN!=null && trN.getFailCount()>0)
return new Summary(false,combine(trN.getFailCount(),"test faliure"));
else // ???
return new Summary(false,"unstable");
}
if(trP.getFailCount()==0)
return new Summary(true,combine(trN.getFailCount(),"test")+" started to fail");
if(trP.getFailCount() < trN.getFailCount())
return new Summary(true,combine(trN.getFailCount()-trP.getFailCount(),"more test")
+" are failing ("+trN.getFailCount()+" total)");
if(trP.getFailCount() > trN.getFailCount())
return new Summary(false,combine(trP.getFailCount()-trN.getFailCount(),"fewer test")
+" are failing ("+trN.getFailCount()+" total)");
return new Summary(false,combine(trN.getFailCount(),"test")+" are still failing");
}
}
return new Summary(false,"?");
}
/**
* Serves the artifacts.
*/
public void doArtifact( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
serveFile(req, rsp, getArtifactsDir(), "package.gif", true);
}
/**
* Returns the build number in the body.
*/
public void doBuildNumber( StaplerRequest req, StaplerResponse rsp ) throws IOException {
rsp.setContentType("text/plain");
rsp.setCharacterEncoding("US-ASCII");
rsp.setStatus(HttpServletResponse.SC_OK);
rsp.getWriter().print(number);
}
/**
* Handles incremental log output.
*/
public void doProgressiveLog( StaplerRequest req, StaplerResponse rsp) throws IOException {
rsp.setContentType("text/plain");
rsp.setCharacterEncoding("UTF-8");
rsp.setStatus(HttpServletResponse.SC_OK);
boolean completed = !isBuilding();
File logFile = getLogFile();
if(!logFile.exists()) {
// file doesn't exist yet
rsp.addHeader("X-Text-Size","0");
rsp.addHeader("X-More-Data","true");
return;
}
LargeText text = new LargeText(logFile,completed);
long start = 0;
String s = req.getParameter("start");
if(s!=null)
start = Long.parseLong(s);
CharSpool spool = new CharSpool();
long r = text.writeLogTo(start,spool);
rsp.addHeader("X-Text-Size",String.valueOf(r));
if(!completed)
rsp.addHeader("X-More-Data","true");
spool.writeTo(rsp.getWriter());
}
public void doToggleLogKeep( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
keepLog = !keepLog;
save();
rsp.forwardToPreviousPage(req);
}
/**
* Accepts the new description.
*/
public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
description = req.getParameter("description");
save();
rsp.sendRedirect("."); // go to the top page
}
/**
* Returns the map that contains the environment variables for this build.
*
* Used by {@link BuildStep}s that invoke external processes.
*/
public Map<String,String> getEnvVars() {
Map<String,String> env = new HashMap<String,String>();
env.put("BUILD_NUMBER",String.valueOf(number));
env.put("BUILD_ID",getId());
env.put("BUILD_TAG","hudson-"+getParent().getName()+"-"+number);
env.put("JOB_NAME",getParent().getName());
return env;
}
private static final XStream XSTREAM = new XStream2();
static {
XSTREAM.alias("build",Build.class);
XSTREAM.registerConverter(Result.conv);
}
private static final Logger LOGGER = Logger.getLogger(Run.class.getName());
/**
* Sort by date. Newer ones first.
*/
public static final Comparator<Run> ORDER_BY_DATE = new Comparator<Run>() {
public int compare(Run lhs, Run rhs) {
return -lhs.getTimestamp().compareTo(rhs.getTimestamp());
}
};
/**
* {@link FeedAdapter} to produce feed from the summary of this build.
*/
public static final FeedAdapter<Run> FEED_ADAPTER = new FeedAdapter<Run>() {
public String getEntryTitle(Run entry) {
return entry+" ("+entry.getResult()+")";
}
public String getEntryUrl(Run entry) {
return entry.getUrl();
}
public String getEntryID(Run entry) {
return "tag:"+entry.getParent().getName()+':'+entry.getId();
}
public Calendar getEntryTimestamp(Run entry) {
return entry.getTimestamp();
}
};
}
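For orientation, here is a minimal sketch of how a concrete build might drive the Runner contract defined above. The enclosing execute() method and the log messages are hypothetical; only run(Runner), Runner, BuildListener and Result come from this file.

    // Hypothetical fragment, assumed to sit inside a concrete Run subclass.
    void execute() {
        run(new Runner() {
            public Result run(BuildListener listener) throws Exception {
                // the "main" part of the build; the returned value becomes this build's result
                listener.getLogger().println("building...");
                return Result.SUCCESS;
            }
            public void post(BuildListener listener) {
                // invoked after run(), even when the main build failed fatally
                listener.getLogger().println("running post-build steps");
            }
        });
    }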

View File

@ -0,0 +1,184 @@
package hudson.model;
import java.util.AbstractMap;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.Comparator;
import java.util.Collections;
import java.util.Map;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
/**
* {@link Map} from build number to {@link Run}.
*
* <p>
* This class is multi-thread safe by using copy-on-write technique,
* and it also updates the bi-directional links within {@link Run}
* accordingly.
*
* @author Kohsuke Kawaguchi
*/
public final class RunMap<R extends Run<?,R>> extends AbstractMap<Integer,R> implements SortedMap<Integer,R> {
// copy-on-write map
private transient volatile SortedMap<Integer,R> builds =
new TreeMap<Integer,R>(COMPARATOR);
/**
* Read-only view of this map.
*/
private final SortedMap<Integer,R> view = Collections.unmodifiableSortedMap(this);
public Set<Entry<Integer,R>> entrySet() {
// since the map is copy-on-write, make sure no one modifies it
return Collections.unmodifiableSet(builds.entrySet());
}
public synchronized R put(R value) {
return put(value.getNumber(),value);
}
public synchronized R put(Integer key, R value) {
// copy-on-write update
TreeMap<Integer,R> m = new TreeMap<Integer,R>(builds);
R r = update(m, key, value);
this.builds = m;
return r;
}
public synchronized void putAll(Map<? extends Integer,? extends R> rhs) {
// copy-on-write update
TreeMap<Integer,R> m = new TreeMap<Integer,R>(builds);
for (Map.Entry<? extends Integer,? extends R> e : rhs.entrySet())
update(m, e.getKey(), e.getValue());
this.builds = m;
}
private R update(TreeMap<Integer, R> m, Integer key, R value) {
// things are a bit tricky because this map is ordered so that the newest build comes first,
// yet 'nextBuild' refers to the newer build.
R first = m.isEmpty() ? null : m.get(m.firstKey());
R r = m.put(key, value);
SortedMap<Integer,R> head = m.headMap(key);
if(!head.isEmpty()) {
R prev = m.get(head.lastKey());
value.previousBuild = prev.previousBuild;
value.nextBuild = prev;
if(value.previousBuild!=null)
value.previousBuild.nextBuild = value;
prev.previousBuild=value;
} else {
value.previousBuild = first;
value.nextBuild = null;
if(first!=null)
first.nextBuild = value;
}
return r;
}
public synchronized boolean remove(R run) {
if(run.nextBuild!=null)
run.nextBuild.previousBuild = run.previousBuild;
if(run.previousBuild!=null)
run.previousBuild.nextBuild = run.nextBuild;
// copy-on-write update
TreeMap<Integer,R> m = new TreeMap<Integer,R>(builds);
R r = m.remove(run.getNumber());
this.builds = m;
return r!=null;
}
public synchronized void reset(TreeMap<Integer,R> builds) {
this.builds = new TreeMap<Integer,R>(COMPARATOR);
putAll(builds);
}
/**
* Gets the read-only view of this map.
*/
public SortedMap<Integer,R> getView() {
return view;
}
//
// SortedMap delegation
//
public Comparator<? super Integer> comparator() {
return builds.comparator();
}
public SortedMap<Integer, R> subMap(Integer fromKey, Integer toKey) {
return builds.subMap(fromKey, toKey);
}
public SortedMap<Integer, R> headMap(Integer toKey) {
return builds.headMap(toKey);
}
public SortedMap<Integer, R> tailMap(Integer fromKey) {
return builds.tailMap(fromKey);
}
public Integer firstKey() {
return builds.firstKey();
}
public Integer lastKey() {
return builds.lastKey();
}
public static final Comparator<Comparable> COMPARATOR = new Comparator<Comparable>() {
public int compare(Comparable o1, Comparable o2) {
return -o1.compareTo(o2);
}
};
/**
* {@link Run} factory.
*/
public interface Constructor<R extends Run<?,R>> {
R create(File dir) throws IOException;
}
/**
* Fills in {@link RunMap} by loading build records from the file system.
*
* @param job
* Job that owns this map.
* @param cons
* Used to create new instance of {@link Run}.
*/
public synchronized void load(Job job, Constructor<R> cons) {
TreeMap<Integer,R> builds = new TreeMap<Integer,R>(RunMap.COMPARATOR);
File buildDir = job.getBuildDir();
buildDir.mkdirs();
String[] buildDirs = buildDir.list(new FilenameFilter() {
public boolean accept(File dir, String name) {
return new File(dir,name).isDirectory();
}
});
for( String build : buildDirs ) {
File d = new File(buildDir,build);
if(new File(d,"build.xml").exists()) {
// if the build result file isn't in the directory, ignore it.
try {
R b = cons.create(d);
builds.put( b.getNumber(), b );
} catch (IOException e) {
e.printStackTrace();
}
}
}
reset(builds);
}
}
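As a hedged usage sketch, this is how a job could fill the map from disk through the Constructor callback; myJob and MyRun are invented names, while load(), Constructor and getView() come from this class.

    // Hypothetical usage; MyRun is assumed to extend Run<MyJob,MyRun> and to offer a (MyJob,File) constructor.
    RunMap<MyRun> runs = new RunMap<MyRun>();
    runs.load(myJob, new RunMap.Constructor<MyRun>() {
        public MyRun create(File dir) throws IOException {
            // deserialize one build record from its per-build directory
            return new MyRun(myJob, dir);
        }
    });
    SortedMap<Integer,MyRun> readOnly = runs.getView();   // safe to hand out; callers cannot modify it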

View File

@ -0,0 +1,224 @@
package hudson.model;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import hudson.Util;
import hudson.util.ArgumentListBuilder;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Date;
/**
* Information about a Hudson slave node.
*
* @author Kohsuke Kawaguchi
*/
public final class Slave implements Node {
/**
* Name of this slave node.
*/
private final String name;
/**
* Description of this node.
*/
private final String description;
/**
* Commands to run to post a job on this machine.
*/
private final String command;
/**
* Path to the root of the workspace
* from within this node, such as "/hudson"
*/
private final String remoteFS;
/**
* Path to the root of the remote workspace of this node,
* such as "/net/slave1/hudson"
*/
private final File localFS;
/**
* Number of executors of this node.
*/
private int numExecutors = 2;
/**
* Job allocation strategy.
*/
private Mode mode;
public Slave(String name, String description, String command, String remoteFS, File localFS, int numExecutors, Mode mode) {
this.name = name;
this.description = description;
this.command = command;
this.remoteFS = remoteFS;
this.localFS = localFS;
this.numExecutors = numExecutors;
this.mode = mode;
}
public String getNodeName() {
return name;
}
public String getCommand() {
return command;
}
public String[] getCommandTokens() {
return Util.tokenize(command);
}
public String getRemoteFS() {
return remoteFS;
}
public File getLocalFS() {
return localFS;
}
public String getNodeDescription() {
return description;
}
public FilePath getFilePath() {
return new FilePath(localFS,remoteFS);
}
public int getNumExecutors() {
return numExecutors;
}
public Mode getMode() {
return mode;
}
/**
* Estimates the clock difference with this slave.
*
* @return
* the difference in milliseconds.
* A large positive value indicates that the master is ahead of the slave,
* and a negative value indicates the opposite.
*/
public long getClockDifference() throws IOException {
File testFile = new File(localFS,"clock.skew");
FileOutputStream os = new FileOutputStream(testFile);
long now = new Date().getTime();
os.close();
long r = now - testFile.lastModified();
testFile.delete();
return r;
}
/**
* Gets the clock difference as an HTML string.
*/
public String getClockDifferenceString() {
try {
long diff = getClockDifference();
if(-1000<diff && diff <1000)
return "In sync"; // clock is in sync
long abs = Math.abs(diff);
String s = Util.getTimeSpanString(abs);
if(diff<0)
s += " ahead";
else
s += " behind";
if(abs>1000*60) // more than a minute difference
s = "<span class='error'>"+s+"</span>";
return s;
} catch (IOException e) {
return "<span class='error'>Unable to check</span>";
}
}
public Launcher createLauncher(TaskListener listener) {
if(command.length()==0) // local alias
return new Launcher(listener);
return new Launcher(listener) {
@Override
public Proc launch(String[] cmd, String[] env, OutputStream out, FilePath workDir) throws IOException {
return super.launch(prepend(cmd,env,workDir), env, null, out);
}
@Override
public Proc launch(String[] cmd, String[] env, InputStream in, OutputStream out) throws IOException {
return super.launch(prepend(cmd,env,CURRENT_DIR), env, in, out);
}
@Override
public boolean isUnix() {
// Err on the side of Unix, since we expect that to be the common case for slaves
return remoteFS.indexOf('\\')==-1;
}
private String[] prepend(String[] cmd, String[] env, FilePath workDir) {
ArgumentListBuilder r = new ArgumentListBuilder();
r.add(getCommandTokens());
r.add(getFilePath().child("bin").child("slave").getRemote());
r.addQuoted(workDir.getRemote());
for (String s : env) {
int index =s.indexOf('=');
r.add(s.substring(0,index));
r.add(s.substring(index+1));
}
r.add("--");
for (String c : cmd) {
// ssh passes the command and parameters in one string.
// see RFC 4254 section 6.5.
// so the consequence is that we need to give
// {"ssh",...,"ls","\"a b\""} to list a file "a b".
// If we just do
// {"ssh",...,"ls","a b"} (which is correct if this goes directly to Runtime.exec),
// then we end up executing "ls","a","b" on the other end.
//
// I looked at the rsh source code, and it behaves the same way.
if(c.indexOf(' ')>=0)
r.addQuoted(c);
else
r.add(c);
}
return r.toCommandArray();
}
};
}
public FilePath getWorkspaceRoot() {
return getFilePath().child("workspace");
}
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
final Slave that = (Slave) o;
return name.equals(that.name);
}
public int hashCode() {
return name.hashCode();
}
private static final FilePath CURRENT_DIR = new FilePath(new File("."));
}
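A hedged configuration sketch: the host name, paths, executor count, and the mode and listener variables are assumptions; the constructor signature and createLauncher() are taken from the class above.

    // Hypothetical slave reached over ssh; 'mode' (a Mode value) and 'listener' (a TaskListener) are assumed to exist.
    Slave linuxSlave = new Slave(
        "slave1",                        // node name
        "Linux build machine",           // description
        "ssh builduser@slave1",          // command prefix used to start processes on the slave
        "/hudson",                       // workspace root as seen from the slave
        new File("/net/slave1/hudson"),  // the same root as seen from the master
        2,                               // number of executors
        mode);
    Launcher launcher = linuxSlave.createLauncher(listener);
    // commands launched through 'launcher' end up wrapped roughly as:
    //   ssh builduser@slave1 /hudson/bin/slave "<workdir>" VAR value ... -- <cmd...>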

View File

@ -0,0 +1,50 @@
package hudson.model;
import hudson.util.WriterOutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.Writer;
/**
* {@link BuildListener} that writes to a {@link Writer}.
* @author Kohsuke Kawaguchi
*/
public class StreamBuildListener implements BuildListener {
private final PrintWriter w;
private final PrintStream ps;
public StreamBuildListener(Writer w) {
this(new PrintWriter(w));
}
public StreamBuildListener(PrintWriter w) {
this.w = w;
// unless we auto-flush, PrintStream will use BufferedOutputStream internally,
// and break ordering
this.ps = new PrintStream(new WriterOutputStream(w),true);
}
public void started() {
w.println("started");
}
public PrintStream getLogger() {
return ps;
}
public PrintWriter error(String msg) {
w.println("ERROR: "+msg);
return w;
}
public PrintWriter fatalError(String msg) {
w.println("FATAL: "+msg);
return w;
}
public void finished(Result result) {
w.println("finished: "+result);
}
}
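A minimal sketch of wiring this listener to an arbitrary writer; System.out is just an example target.

    // Hypothetical usage; any Writer works here.
    BuildListener listener = new StreamBuildListener(new java.io.OutputStreamWriter(System.out));
    listener.started();
    listener.getLogger().println("compiling...");   // goes through the auto-flushing PrintStream
    listener.error("something went wrong");
    listener.finished(Result.FAILURE);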

View File

@ -0,0 +1,44 @@
package hudson.model;
import hudson.util.StreamTaskListener;
import hudson.util.NullStream;
import java.io.PrintStream;
import java.io.PrintWriter;
/**
* Receives events that happen during some task execution,
* such as a build or SCM change polling.
*
* @author Kohsuke Kawaguchi
*/
public interface TaskListener {
/**
* This writer will receive the output of the build.
*
* @return
* must be non-null.
*/
PrintStream getLogger();
/**
* An error in the build.
*
* @return
* A writer to receive details of the error. Not null.
*/
PrintWriter error(String msg);
/**
* A fatal error in the build.
*
* @return
* A writer to receive details of the error. Not null.
*/
PrintWriter fatalError(String msg);
/**
* {@link TaskListener} that discards the output.
*/
public static final TaskListener NULL = new StreamTaskListener(new NullStream());
}

View File

@ -0,0 +1,261 @@
package hudson.model;
import com.thoughtworks.xstream.XStream;
import hudson.FeedAdapter;
import hudson.XmlFile;
import hudson.model.Descriptor.FormException;
import hudson.scm.ChangeLogSet;
import hudson.util.RunList;
import hudson.util.XStream2;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Represents a user.
*
* @author Kohsuke Kawaguchi
*/
public class User extends AbstractModelObject {
private transient final String id;
private volatile String fullName;
private volatile String description;
/**
* List of {@link UserProperty}s configured for this user.
* Copy-on-write semantics.
*/
private volatile List<UserProperty> properties = new ArrayList<UserProperty>();
private User(String id) {
this.id = id;
this.fullName = id; // fullName defaults to name
for (UserPropertyDescriptor d : UserProperties.LIST) {
UserProperty up = d.newInstance(this);
if(up!=null)
properties.add(up);
}
// load the other data from disk if it's available
XmlFile config = getConfigFile();
try {
if(config.exists())
config.unmarshal(this);
} catch (IOException e) {
LOGGER.log(Level.SEVERE, "Failed to load "+config,e);
}
for (UserProperty p : properties)
p.setUser(this);
}
public String getId() {
return id;
}
public String getUrl() {
return "user/"+ id;
}
/**
* Gets the human readable name of this user.
* This is configurable by the user.
*
* @return
* never null.
*/
public String getFullName() {
return fullName;
}
public String getDescription() {
return description;
}
/**
* Gets the user properties configured for this user.
*/
public Map<Descriptor<UserProperty>,UserProperty> getProperties() {
return Descriptor.toMap(properties);
}
/**
* Gets the specific property, or null.
*/
public <T extends UserProperty> T getProperty(Class<T> clazz) {
for (UserProperty p : properties) {
if(clazz.isInstance(p))
return (T)p; // can't use Class.cast as that's 5.0 feature
}
return null;
}
/**
* Accepts the new description.
*/
public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
req.setCharacterEncoding("UTF-8");
description = req.getParameter("description");
save();
rsp.sendRedirect("."); // go to the top page
}
public static User get(String name) {
if(name==null)
return null;
synchronized(byName) {
User u = byName.get(name);
if(u==null) {
u = new User(name);
byName.put(name,u);
}
return u;
}
}
/**
* Returns the user name.
*/
public String getDisplayName() {
return getFullName();
}
/**
* Gets the list of {@link Build}s that include changes by this user,
* in timestamp order.
*
* TODO: do we need some index for this?
*/
public List<Build> getBuilds() {
List<Build> r = new ArrayList<Build>();
for (Project p : Hudson.getInstance().getProjects()) {
for (Build b : p.getBuilds()) {
for (ChangeLogSet.Entry e : b.getChangeSet()) {
if(e.getAuthor()==this) {
r.add(b);
break;
}
}
}
}
Collections.sort(r,Run.ORDER_BY_DATE);
return r;
}
public String toString() {
return fullName;
}
/**
* The file where we save our configuration.
*/
protected final XmlFile getConfigFile() {
return new XmlFile(XSTREAM,new File(Hudson.getInstance().getRootDir(),"users/"+ id +"/config.xml"));
}
/**
* Save the settings to a file.
*/
public synchronized void save() throws IOException {
XmlFile config = getConfigFile();
config.mkdirs();
config.write(this);
}
/**
* Accepts submission from the configuration page.
*/
public void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
try {
fullName = req.getParameter("fullName");
description = req.getParameter("description");
List<UserProperty> props = new ArrayList<UserProperty>();
for (Descriptor<UserProperty> d : UserProperties.LIST)
props.add(d.newInstance(req));
this.properties = props;
save();
rsp.sendRedirect(".");
} catch (FormException e) {
sendError(e,req,rsp);
}
}
public void doRssAll( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " all builds", RunList.fromRuns(getBuilds()));
}
public void doRssFailed( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
rss(req, rsp, " regression builds", RunList.fromRuns(getBuilds()).regressionOnly());
}
private void rss(StaplerRequest req, StaplerResponse rsp, String suffix, RunList runs) throws IOException, ServletException {
RSS.forwardToRss(getDisplayName()+ suffix, getUrl(),
runs.newBuilds(), FEED_ADAPTER, req, rsp );
}
/**
* Keyed by {@link User#id}.
*/
private static final Map<String,User> byName = new HashMap<String,User>();
/**
* Used to load/save user configuration.
*/
private static final XStream XSTREAM = new XStream2();
private static final Logger LOGGER = Logger.getLogger(User.class.getName());
static {
XSTREAM.alias("user",User.class);
}
/**
* {@link FeedAdapter} to produce build status summary in the feed.
*/
public static final FeedAdapter<Run> FEED_ADAPTER = new FeedAdapter<Run>() {
public String getEntryTitle(Run entry) {
return entry+" : "+entry.getBuildStatusSummary().message;
}
public String getEntryUrl(Run entry) {
return entry.getUrl();
}
public String getEntryID(Run entry) {
return "tag:"+entry.getParent().getName()+':'+entry.getId();
}
public Calendar getEntryTimestamp(Run entry) {
return entry.getTimestamp();
}
};
}
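A hedged sketch of the typical lookup path; the user id is made up, and Mailer.UserProperty is used only because it is the one property type registered in this commit (see UserProperties below).

    // Hypothetical usage of the user registry and typed property lookup.
    User u = User.get("kohsuke");                                     // lazily creates the user if needed
    Mailer.UserProperty mail = u.getProperty(Mailer.UserProperty.class);
    System.out.println(u.getFullName() + (mail!=null ? " has a mail property" : " has no mail property"));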

View File

@ -0,0 +1,15 @@
package hudson.model;
import hudson.tasks.Mailer;
import java.util.List;
/**
* List of all installed {@link UserProperty} types.
* @author Kohsuke Kawaguchi
*/
public class UserProperties {
public static final List<UserPropertyDescriptor> LIST = Descriptor.<UserPropertyDescriptor>toList(
Mailer.UserProperty.DESCRIPTOR
);
}

View File

@ -0,0 +1,28 @@
package hudson.model;
import hudson.Plugin;
import hudson.ExtensionPoint;
/**
* Extensible property of {@link User}.
*
* <p>
* {@link Plugin}s can extend this to define custom properties
* for {@link User}s. {@link UserProperty}s show up in the user
* configuration screen, and they are persisted with the user object.
*
* @author Kohsuke Kawaguchi
* @see UserProperties#LIST
*/
public abstract class UserProperty implements Describable<UserProperty>, ExtensionPoint {
/**
* The user object that owns this property.
* This value will be set by the Hudson code.
* Derived classes can expect this value to be always set.
*/
protected transient User user;
/*package*/ final void setUser(User u) {
this.user = u;
}
}

View File

@ -0,0 +1,21 @@
package hudson.model;
/**
* {@link Descriptor} for {@link UserProperty}.
*
* @author Kohsuke Kawaguchi
*/
public abstract class UserPropertyDescriptor extends Descriptor<UserProperty> {
protected UserPropertyDescriptor(Class<? extends UserProperty> clazz) {
super(clazz);
}
/**
* Creates a default instance of {@link UserProperty} to be associated
* with a {@link User} that doesn't have any backing data store.
*
* @return null
* if the implementation chooses not to add any property object for such a user.
*/
public abstract UserProperty newInstance(User user);
}

View File

@ -0,0 +1,136 @@
package hudson.model;
import hudson.Util;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
/**
* Represents a collection of {@link Job}s.
*
* @author Kohsuke Kawaguchi
*/
public class View extends JobCollection {
private final Hudson owner;
/**
* List of job names. This is what gets serialized.
*/
/*package*/ final Set<String> jobNames = new TreeSet<String>();
/**
* Name of this view.
*/
private String name;
/**
* Message displayed in the view page.
*/
private String description;
public View(Hudson owner, String name) {
this.name = name;
this.owner = owner;
}
/**
* Returns a read-only view of all {@link Job}s in this view.
*
* <p>
* This method returns a separate copy each time to avoid
* concurrent modification issues.
*/
public synchronized List<Job> getJobs() {
Job[] jobs = new Job[jobNames.size()];
int i=0;
for (String name : jobNames)
jobs[i++] = owner.getJob(name);
return Arrays.asList(jobs);
}
public boolean containsJob(Job job) {
return jobNames.contains(job.getName());
}
public String getViewName() {
return name;
}
public String getDescription() {
return description;
}
public String getDisplayName() {
return name;
}
public Job doCreateJob(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return null;
Job job = owner.doCreateJob(req, rsp);
if(job!=null) {
jobNames.add(job.getName());
owner.save();
}
return job;
}
public String getUrl() {
return "view/"+name+'/';
}
/**
* Accepts submission from the configuration page.
*/
public synchronized void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
jobNames.clear();
for (Job job : owner.getJobs()) {
if(req.getParameter(job.getName())!=null)
jobNames.add(job.getName());
}
description = Util.nullify(req.getParameter("description"));
owner.save();
rsp.sendRedirect(".");
}
/**
* Accepts the new description.
*/
public synchronized void doSubmitDescription( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
if(!Hudson.adminCheck(req,rsp))
return;
req.setCharacterEncoding("UTF-8");
description = req.getParameter("description");
owner.save();
rsp.sendRedirect("."); // go to the top page
}
/**
* Deletes this view.
*/
public synchronized void doDoDelete( StaplerRequest req, StaplerResponse rsp ) throws IOException {
if(!Hudson.adminCheck(req,rsp))
return;
owner.deleteView(this);
rsp.sendRedirect2(req.getContextPath()+"/");
}
}

View File

@ -0,0 +1,148 @@
package hudson.model;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.IOException;
import java.util.LinkedHashSet;
import java.util.SortedMap;
/**
* {@link Job} that monitors activities that happen outside Hudson,
* and therefore requires an occasional batch reload to obtain up-to-date information.
*
* <p>
* This can be used as a base class to derive custom {@link Job} type.
*
* @author Kohsuke Kawaguchi
*/
public abstract class ViewJob<JobT extends ViewJob<JobT,RunT>, RunT extends Run<JobT,RunT>>
extends Job<JobT,RunT> {
/**
* We occasionally update the list of {@link Run}s from a file system.
* The next scheduled update time.
*/
private transient long nextUpdate = 0;
/**
* All {@link Run}s. Copy-on-write semantics.
*/
protected transient /*almost final*/ RunMap<RunT> runs = new RunMap<RunT>();
private transient boolean notLoaded = true;
/**
* Set to true while the reloading of runs is in progress
* (in another thread).
*/
private transient volatile boolean reloadingInProgress;
/**
* {@link ExternalJob}s that need to be reloaded.
*
* This is a set, so no {@link ExternalJob}s are scheduled twice, yet
* its order is predictable, avoiding starvation.
*/
private static final LinkedHashSet<ViewJob> reloadQueue = new LinkedHashSet<ViewJob>();
/*package*/ static final Thread reloadThread = new ReloadThread();
static {
reloadThread.start();
}
protected ViewJob(Hudson parent, String name) {
super(parent, name);
}
public boolean isBuildable() {
return false;
}
protected SortedMap<Integer,RunT> _getRuns() {
if(notLoaded || runs==null) {
// if none is loaded yet, do so immediately.
synchronized(this) {
if(runs==null)
runs = new RunMap<RunT>();
if(notLoaded) {
notLoaded = false;
_reload();
}
}
}
if(nextUpdate<System.currentTimeMillis()) {
if(!reloadingInProgress) {
// schedule a new reloading operation.
// we don't want to block the current thread,
// so reloading is done asynchronously.
reloadingInProgress = true;
synchronized(reloadQueue) {
reloadQueue.add(this);
reloadQueue.notify();
}
}
}
return runs;
}
public void removeRun(RunT run) {
// reload the info next time
nextUpdate = 0;
}
private void _reload() {
try {
reload();
} finally {
reloadingInProgress = false;
nextUpdate = System.currentTimeMillis()+1000;
}
}
/**
* Reloads the list of {@link Run}s. This operation can take a long time.
*
* <p>
* The loaded {@link Run}s should be set to {@link #runs}.
*/
protected abstract void reload();
public void doConfigSubmit( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
super.doConfigSubmit(req,rsp);
// make sure to reload to reflect this config change.
nextUpdate = 0;
}
/**
* Thread that reloads the {@link Run}s.
*/
private static final class ReloadThread extends Thread {
private ViewJob getNext() throws InterruptedException {
synchronized(reloadQueue) {
while(reloadQueue.isEmpty())
reloadQueue.wait();
ViewJob job = reloadQueue.iterator().next();
reloadQueue.remove(job);
return job;
}
}
public void run() {
while (true) {
try {
getNext()._reload();
} catch (InterruptedException e) {
// treat this as a death signal
return;
} catch (Throwable t) {
// otherwise ignore any error
t.printStackTrace();
}
}
}
}
// private static final Logger logger = Logger.getLogger(ViewJob.class.getName());
}
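A hedged sketch of a subclass; MyExternalJob and MyRun are invented names and the directory scan is elided. Only reload(), the runs field and RunMap.reset()/COMPARATOR come from this commit.

    // Hypothetical subclass; MyRun is assumed to extend Run<MyExternalJob,MyRun>.
    public class MyExternalJob extends ViewJob<MyExternalJob,MyRun> {
        public MyExternalJob(Hudson parent, String name) {
            super(parent, name);
        }
        protected void reload() {
            TreeMap<Integer,MyRun> loaded = new TreeMap<Integer,MyRun>(RunMap.COMPARATOR);
            // ... scan the build directory and put freshly parsed MyRun instances into 'loaded' ...
            runs.reset(loaded);   // publish the new snapshot; readers keep the old copy until this point
        }
    }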

View File

@ -0,0 +1,119 @@
package hudson.model;
import hudson.Util;
import hudson.util.StreamTaskListener;
import java.io.File;
import java.io.FileFilter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
import java.util.logging.Level;
/**
* Clean up old left-over workspaces from slaves.
*
* @author Kohsuke Kawaguchi
*/
public class WorkspaceCleanupThread extends PeriodicWork {
private static WorkspaceCleanupThread theInstance;
public WorkspaceCleanupThread() {
super("Workspace clean-up");
theInstance = this;
}
public static void invoke() {
theInstance.run();
}
private TaskListener listener;
protected void execute() {
Hudson h = Hudson.getInstance();
try {
// don't buffer this, so that the log shows what the worker thread is up to in real time
OutputStream os = new FileOutputStream(
new File(h.getRootDir(),"workspace-cleanup.log"));
try {
listener = new StreamTaskListener(os);
for (Slave s : h.getSlaves()) {
process(s);
}
process(h);
} finally {
os.close();
}
} catch (IOException e) {
logger.log(Level.SEVERE, "Failed to access log file",e);
}
}
private void process(Hudson h) {
File jobs = new File(h.getRootDir(), "jobs");
File[] dirs = jobs.listFiles(DIR_FILTER);
if(dirs==null) return;
for (File dir : dirs) {
File ws = new File(dir, "workspace");
if(shouldBeDeleted(dir.getName(),ws,h)) {
delete(ws);
}
}
}
private boolean shouldBeDeleted(String jobName, File dir, Node n) {
Job job = Hudson.getInstance().getJob(jobName);
if(job==null)
// no such project anymore
return true;
if(!dir.exists())
return false;
if (job instanceof Project) {
Project p = (Project) job;
Node lb = p.getLastBuiltOn();
if(lb!=null && lb.equals(n))
// this is the active workspace. keep it.
return false;
}
// if older than a month, delete
return dir.lastModified() + 30 * DAY < new Date().getTime();
}
private void process(Slave s) {
// TODO: we should be using launcher to execute remote rm -rf
listener.getLogger().println("Scanning "+s.getNodeName());
File[] dirs = s.getWorkspaceRoot().getLocal().listFiles(DIR_FILTER);
if(dirs ==null) return;
for (File dir : dirs) {
if(shouldBeDeleted(dir.getName(),dir,s))
delete(dir);
}
}
private void delete(File dir) {
try {
listener.getLogger().println("Deleting "+dir);
Util.deleteRecursive(dir);
} catch (IOException e) {
e.printStackTrace(listener.error("Failed to delete "+dir));
}
}
private static final FileFilter DIR_FILTER = new FileFilter() {
public boolean accept(File f) {
return f.isDirectory();
}
};
private static final long DAY = 1000*60*60*24;
}

View File

@ -0,0 +1,3 @@
<html><body>
Core object model that is bound to URLs via stapler, rooted at <a href="Hudson.html"><tt>Hudson</tt></a>.
</body></html>

View File

@ -0,0 +1,87 @@
/*
* Copyright 2002,2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import java.util.Date;
import java.util.Vector;
/**
* CVS Entry.
*
* @version $Revision$ $Date$
*/
class CVSEntry {
private Date m_date;
private String m_author;
private final String m_comment;
private final Vector<RCSFile> m_files = new Vector<RCSFile>();
public CVSEntry(Date date, String author, String comment) {
m_date = date;
m_author = author;
m_comment = comment;
}
public void addFile(String file, String revision, String previousRevision, String branch, boolean dead) {
m_files.addElement(new RCSFile(file, revision, previousRevision, branch, dead));
}
// maybe null, in case of error
Date getDate() {
return m_date;
}
void setAuthor(final String author) {
m_author = author;
}
String getAuthor() {
return m_author;
}
String getComment() {
return m_comment;
}
Vector getFiles() {
return m_files;
}
/**
* Checks if any of the entries include a change to a branch.
*
* @param branch
* can be null to indicate the trunk.
*/
public boolean containsBranch(String branch) {
for (RCSFile file : m_files) {
String b = file.getBranch();
if(b==null && branch==null)
return true;
if(b==null || branch==null)
continue;
if(b.equals(branch))
return true;
}
return false;
}
public String toString() {
return getAuthor() + "\n" + getDate() + "\n" + getFiles() + "\n"
+ getComment();
}
}

View File

@ -0,0 +1,336 @@
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
// patched to work around http://issues.apache.org/bugzilla/show_bug.cgi?id=38583
import org.apache.tools.ant.Project;
import org.apache.tools.ant.Task;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.TimeZone;
import java.util.Map;
import java.util.HashMap;
import java.util.Map.Entry;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
/**
* A class used to parse the output of the CVS log command.
*
* @version $Revision$ $Date$
*/
class ChangeLogParser {
//private static final int GET_ENTRY = 0;
private static final int GET_FILE = 1;
private static final int GET_DATE = 2;
private static final int GET_COMMENT = 3;
private static final int GET_REVISION = 4;
private static final int GET_PREVIOUS_REV = 5;
private static final int GET_SYMBOLIC_NAMES = 6;
/**
* input format for dates read in from cvs log.
*
* Some users reported that they see different formats,
* so this is extended from the original Ant version to cover them.
*/
private static final SimpleDateFormat[] c_inputDate
= new SimpleDateFormat[]{
new SimpleDateFormat("yyyy/MM/dd HH:mm:ss"),
new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"),
};
static {
TimeZone utc = TimeZone.getTimeZone("UTC");
for (SimpleDateFormat df : c_inputDate) {
df.setTimeZone(utc);
}
}
//The following is data used while processing stdout of CVS command
private String m_file;
private String m_date;
private String m_author;
private String m_comment;
private String m_revision;
private String m_previousRevision;
/**
* All branches available on the current file.
* Keyed by branch revision prefix (like "1.2.3." if files in the branch have revision numbers like
* "1.2.3.4") and the value is the branch name.
*/
private final Map<String,String> branches = new HashMap<String,String>();
/**
* True if the log record indicates deletion.
*/
private boolean m_dead;
private int m_status = GET_FILE;
/** rcs entries */
private final Hashtable<String,CVSEntry> m_entries = new Hashtable<String,CVSEntry>();
private final Task owner;
public ChangeLogParser(Task owner) {
this.owner = owner;
}
/**
* Get a list of rcs entries as an array.
*
* @return a list of rcs entries as an array
*/
CVSEntry[] getEntrySetAsArray() {
final CVSEntry[] array = new CVSEntry[ m_entries.size() ];
Enumeration e = m_entries.elements();
int i = 0;
while (e.hasMoreElements()) {
array[i++] = (CVSEntry) e.nextElement();
}
return array;
}
private boolean dead = false;
/**
* Receive notification about the process writing
* to standard output.
*/
public void stdout(final String line) {
if(dead)
return;
try {
switch(m_status) {
case GET_FILE:
// make sure attributes are reset when
// working on a 'new' file.
reset();
processFile(line);
break;
case GET_SYMBOLIC_NAMES:
processSymbolicName(line);
break;
case GET_REVISION:
processRevision(line);
break;
case GET_DATE:
processDate(line);
break;
case GET_COMMENT:
processComment(line);
break;
case GET_PREVIOUS_REV:
processGetPreviousRevision(line);
break;
}
} catch (Exception e) {
// we don't know how to handle the input any more. don't accept any more input
dead = true;
}
}
/**
* Process a line while in "GET_COMMENT" state.
*
* @param line the line
*/
private void processComment(final String line) {
final String lineSeparator = System.getProperty("line.separator");
if (line.startsWith("======")) {
//We have ended changelog for that particular file
//so we can save it
final int end
= m_comment.length() - lineSeparator.length(); //was -1
m_comment = m_comment.substring(0, end);
saveEntry();
m_status = GET_FILE;
} else if (line.startsWith("----------------------------")) {
final int end
= m_comment.length() - lineSeparator.length(); //was -1
m_comment = m_comment.substring(0, end);
m_status = GET_PREVIOUS_REV;
} else {
m_comment += line + lineSeparator;
}
}
/**
* Process a line while in "GET_FILE" state.
*
* @param line the line
*/
private void processFile(final String line) {
if (line.startsWith("Working file:")) {
m_file = line.substring(14, line.length());
m_status = GET_SYMBOLIC_NAMES;
}
}
/**
* Obtains the revision name list
*/
private void processSymbolicName(String line) {
if (line.startsWith("\t")) {
line = line.trim();
int idx = line.lastIndexOf(':');
if(idx<0) {
// ???
return;
}
String symbol = line.substring(0,idx);
Matcher m = DOT_PATTERN.matcher(line.substring(idx + 2));
if(!m.matches())
return; // not a branch name
branches.put(m.group(1)+m.group(3)+'.',symbol);
} else
if (line.startsWith("keyword substitution:")) {
m_status = GET_REVISION;
}
}
private static final Pattern DOT_PATTERN = Pattern.compile("(([0-9]+\\.)+)0\\.([0-9]+)");
/**
* Process a line while in "REVISION" state.
*
* @param line the line
*/
private void processRevision(final String line) {
if (line.startsWith("revision")) {
m_revision = line.substring(9);
m_status = GET_DATE;
} else if (line.startsWith("======")) {
//There were no revisions in this changelog
//entry, so let's move on to the next file
m_status = GET_FILE;
}
}
/**
* Process a line while in "DATE" state.
*
* @param line the line
*/
private void processDate(final String line) {
if (line.startsWith("date:")) {
m_date = line.substring(6, 25);
String lineData = line.substring(line.indexOf(";") + 1);
m_author = lineData.substring(10, lineData.indexOf(";"));
m_status = GET_COMMENT;
m_dead = lineData.indexOf("state: dead;")!=-1;
//Reset comment to empty here as we can accumulate multiple lines
//in the processComment method
m_comment = "";
}
}
/**
* Process a line while in "GET_PREVIOUS_REVISION" state.
*
* @param line the line
*/
private void processGetPreviousRevision(final String line) {
if (!line.startsWith("revision")) {
throw new IllegalStateException("Unexpected line from CVS: "
+ line);
}
m_previousRevision = line.substring(9);
saveEntry();
m_revision = m_previousRevision;
m_status = GET_DATE;
}
/**
* Utility method that saves the current entry.
*/
private void saveEntry() {
final String entryKey = m_date + m_author + m_comment;
CVSEntry entry;
if (!m_entries.containsKey(entryKey)) {
entry = new CVSEntry(parseDate(m_date), m_author, m_comment);
m_entries.put(entryKey, entry);
} else {
entry = m_entries.get(entryKey);
}
entry.addFile(m_file, m_revision, m_previousRevision, findBranch(m_revision), m_dead);
}
/**
* Finds the branch name that matches the revision, or null if not found.
*/
private String findBranch(String revision) {
if(revision==null) return null; // defensive check
for (Entry<String,String> e : branches.entrySet()) {
if(revision.startsWith(e.getKey()) && revision.substring(e.getKey().length()).indexOf('.')==-1)
return e.getValue();
}
return null;
}
/**
* Parse date out from expected format.
*
* @param date the string holding the date
* @return the date object or null if unknown date format
*/
private Date parseDate(String date) {
for (SimpleDateFormat df : c_inputDate) {
try {
return df.parse(date);
} catch (ParseException e) {
// try next if one fails
}
}
// nothing worked
owner.log("Failed to parse "+date+"\n", Project.MSG_ERR);
//final String message = REZ.getString( "changelog.bat-date.error", date );
//getContext().error( message );
return null;
}
/**
* reset all internal attributes except status.
*/
private void reset() {
m_file = null;
m_date = null;
m_author = null;
m_comment = null;
m_revision = null;
m_previousRevision = null;
m_dead = false;
branches.clear();
}
}
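For orientation, a hedged driver that feeds a few made-up lines of cvs log output through the state machine above; the file name, revision, date and author are invented, and since the class is package-private the snippet is assumed to live in the same package. 'task' is an Ant Task assumed to be available.

    // Hypothetical driver showing the input shape the parser keys on.
    ChangeLogParser parser = new ChangeLogParser(task);
    for (String line : new String[] {
            "Working file: src/Foo.java",
            "keyword substitution: kv",
            "revision 1.2",
            "date: 2006/11/05 21:16:01;  author: kohsuke;  state: Exp;  lines: +1 -1",
            "fixed a typo",
            "=============================================================================",
    }) {
        parser.stdout(line);
    }
    CVSEntry[] entries = parser.getEntrySetAsArray();   // one entry, authored by "kohsuke"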

View File

@ -0,0 +1,429 @@
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Project;
import org.apache.tools.ant.taskdefs.AbstractCvsTask;
import org.apache.tools.ant.taskdefs.cvslib.CvsVersion;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.UnsupportedEncodingException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.Vector;
/**
* Examines the output of cvs log and groups related changes together.
*
* It produces an XML output representing the list of changes.
* <PRE>
* <FONT color=#0000ff>&lt;!-- Root element --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> changelog <FONT color=#ff00ff>(entry</FONT><FONT color=#ff00ff>+</FONT><FONT color=#ff00ff>)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- CVS Entry --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> entry <FONT color=#ff00ff>(date,author,file</FONT><FONT color=#ff00ff>+</FONT><FONT color=#ff00ff>,msg)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- Date of cvs entry --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> date <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- Author of change --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> author <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- List of files affected --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> msg <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- File changed --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> file <FONT color=#ff00ff>(name,revision,prevrevision</FONT><FONT color=#ff00ff>?</FONT><FONT color=#ff00ff>)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- Name of the file --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> name <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- Revision number --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> revision <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* <FONT color=#0000ff>&lt;!-- Previous revision number --&gt;</FONT>
* <FONT color=#6a5acd>&lt;!ELEMENT</FONT> prevrevision <FONT color=#ff00ff>(#PCDATA)</FONT><FONT color=#6a5acd>&gt;</FONT>
* </PRE>
*
* @version $Revision$ $Date$
* @since Ant 1.5
* @ant.task name="cvschangelog" category="scm"
*/
public class ChangeLogTask extends AbstractCvsTask {
/** User list */
private File m_usersFile;
/** User list */
private Vector m_cvsUsers = new Vector();
/** Input dir */
private File m_dir;
/** Output file */
private File m_destfile;
/** The earliest date at which to start processing entries. */
private Date m_start;
/** The latest date at which to stop processing entries. */
private Date m_stop;
/**
* To filter out change logs for a certain branch, this variable will be the branch name.
* Otherwise null.
*/
private String branch;
/**
* Filesets containing list of files against which the cvs log will be
* performed. If empty, then all files in the working directory will
* be checked.
*/
private List<String> m_filesets = new ArrayList<String>();
/**
* Set the base dir for cvs.
*
* @param dir The new dir value
*/
public void setDir(final File dir) {
m_dir = dir;
}
/**
* Set the output file for the log.
*
* @param destfile The new destfile value
*/
public void setDestfile(final File destfile) {
m_destfile = destfile;
}
/**
* Set a lookup list of user names & addresses
*
* @param usersFile The file containing the users info.
*/
public void setUsersfile(final File usersFile) {
m_usersFile = usersFile;
}
/**
* Add a user to list changelog knows about.
*
* @param user the user
*/
public void addUser(final CvsUser user) {
m_cvsUsers.addElement(user);
}
/**
* Set the date at which the changelog should start.
*
* @param start The date at which the changelog should start.
*/
public void setStart(final Date start) {
m_start = start;
}
public void setBranch(String branch) {
this.branch = branch;
}
/**
* Set the date at which the changelog should stop.
*
* @param stop The date at which the changelog should stop.
*/
public void setEnd(final Date stop) {
m_stop = stop;
}
/**
* Set the number of days worth of log entries to process.
*
* @param days the number of days of log to process.
*/
public void setDaysinpast(final int days) {
final long time = System.currentTimeMillis()
- (long) days * 24 * 60 * 60 * 1000;
setStart(new Date(time));
}
/**
* Adds a file about which cvs logs will be generated.
*
* @param fileName
* fileName relative to {@link #setDir(File)}.
*/
public void addFile(String fileName) {
m_filesets.add(fileName);
}
public void setFile(List<String> files) {
m_filesets = files;
}
/**
* Execute task
*
* @exception BuildException if something goes wrong executing the
* cvs command
*/
public void execute() throws BuildException {
File savedDir = m_dir; // may be altered in validate
try {
validate();
final Properties userList = new Properties();
loadUserlist(userList);
for (Enumeration e = m_cvsUsers.elements();
e.hasMoreElements();) {
final CvsUser user = (CvsUser) e.nextElement();
user.validate();
userList.put(user.getUserID(), user.getDisplayname());
}
setCommand("log");
if (getTag() != null) {
CvsVersion myCvsVersion = new CvsVersion();
myCvsVersion.setProject(getProject());
myCvsVersion.setTaskName("cvsversion");
myCvsVersion.setCvsRoot(getCvsRoot());
myCvsVersion.setCvsRsh(getCvsRsh());
myCvsVersion.setPassfile(getPassFile());
myCvsVersion.setDest(m_dir);
myCvsVersion.execute();
if (myCvsVersion.supportsCvsLogWithSOption()) {
addCommandArgument("-S");
}
}
if (null != m_start) {
final SimpleDateFormat outputDate =
new SimpleDateFormat("yyyy-MM-dd");
// Kohsuke patch:
// probably due to timezone difference between server/client and
// the lack of precise specification in the protocol or something,
// sometimes the java.net CVS server (and probably others) doesn't
// always report all the changes that have happened in the given day.
// so let's make the date range a bit wider, to make sure that
// the server sends us all the logs that we care about.
//
// the only downside of this change is that it will increase the traffic
// unnecessarily, but given that in Hudson we already narrow down the scope
// by specifying files, this should be an acceptable increase.
Date safeStart = new Date(m_start.getTime()-1000L*60*60*24);
// Kohsuke patch until here
// We want something of the form: -d ">=YYYY-MM-dd"
final String dateRange = ">=" + outputDate.format(safeStart);
// Supply '-d' as a separate argument - Bug# 14397
addCommandArgument("-d");
addCommandArgument(dateRange);
}
// Check if list of files to check has been specified
if (!m_filesets.isEmpty()) {
for (String file : m_filesets) {
addCommandArgument(file);
}
}
final ChangeLogParser parser = new ChangeLogParser(this);
final RedirectingStreamHandler handler =
new RedirectingStreamHandler(parser);
log(getCommand(), Project.MSG_VERBOSE);
setDest(m_dir);
setExecuteStreamHandler(handler);
try {
super.execute();
} finally {
final String errors = handler.getErrors();
if (null != errors && errors.length()!=0) {
log(errors, Project.MSG_ERR);
}
}
final CVSEntry[] entrySet = parser.getEntrySetAsArray();
final CVSEntry[] filteredEntrySet = filterEntrySet(entrySet);
replaceAuthorIdWithName(userList, filteredEntrySet);
writeChangeLog(filteredEntrySet);
} finally {
m_dir = savedDir;
}
}
/**
* Validate the parameters specified for task.
*
* @throws BuildException if fails validation checks
*/
private void validate()
throws BuildException {
if (null == m_dir) {
m_dir = getProject().getBaseDir();
}
if (null == m_destfile) {
final String message = "Destfile must be set.";
throw new BuildException(message);
}
if (!m_dir.exists()) {
final String message = "Cannot find base dir "
+ m_dir.getAbsolutePath();
throw new BuildException(message);
}
if (null != m_usersFile && !m_usersFile.exists()) {
final String message = "Cannot find user lookup list "
+ m_usersFile.getAbsolutePath();
throw new BuildException(message);
}
}
/**
* Load the userlist from the userList file (if specified) and add to
* list of users.
*
* @param userList the file of users
* @throws BuildException if file can not be loaded for some reason
*/
private void loadUserlist(final Properties userList)
throws BuildException {
if (null != m_usersFile) {
try {
userList.load(new FileInputStream(m_usersFile));
} catch (final IOException ioe) {
throw new BuildException(ioe.toString(), ioe);
}
}
}
/**
* Filter the specified entries according to an appropriate rule.
*
* @param entrySet the entry set to filter
* @return the filtered entry set
*/
private CVSEntry[] filterEntrySet(final CVSEntry[] entrySet) {
final Vector results = new Vector();
for (int i = 0; i < entrySet.length; i++) {
final CVSEntry cvsEntry = entrySet[i];
final Date date = cvsEntry.getDate();
if(date==null)
// skip dates that didn't parse.
continue;
if (null != m_start && m_start.after(date)) {
//Skip dates that are too early
continue;
}
if (null != m_stop && m_stop.before(date)) {
//Skip dates that are too late
continue;
}
if (!cvsEntry.containsBranch(branch))
// didn't match the branch
continue;
results.addElement(cvsEntry);
}
final CVSEntry[] resultArray = new CVSEntry[results.size()];
results.copyInto(resultArray);
return resultArray;
}
/**
* Replace all known author IDs with the display names from the user list.
*/
private void replaceAuthorIdWithName(final Properties userList,
final CVSEntry[] entrySet) {
for (int i = 0; i < entrySet.length; i++) {
final CVSEntry entry = entrySet[ i ];
if (userList.containsKey(entry.getAuthor())) {
entry.setAuthor(userList.getProperty(entry.getAuthor()));
}
}
}
/**
* Print changelog to file specified in task.
*
* @param entrySet the entry set to write.
* @throws BuildException if there is an error writing changelog.
*/
private void writeChangeLog(final CVSEntry[] entrySet)
throws BuildException {
FileOutputStream output = null;
try {
output = new FileOutputStream(m_destfile);
final PrintWriter writer =
new PrintWriter(new OutputStreamWriter(output, "UTF-8"));
final ChangeLogWriter serializer = new ChangeLogWriter();
serializer.printChangeLog(writer, entrySet);
} catch (final UnsupportedEncodingException uee) {
getProject().log(uee.toString(), Project.MSG_ERR);
} catch (final IOException ioe) {
throw new BuildException(ioe.toString(), ioe);
} finally {
if (null != output) {
try {
output.close();
} catch (final IOException ioe) {
}
}
}
}
}

View File

@ -0,0 +1,105 @@
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import java.io.PrintWriter;
import java.text.SimpleDateFormat;
import java.util.Enumeration;
import java.util.TimeZone;
/**
* Class used to generate an XML changelog.
*
* @version $Revision$ $Date$
*/
class ChangeLogWriter {
/** output format for dates written to xml file */
private static final SimpleDateFormat c_outputDate
= new SimpleDateFormat("yyyy-MM-dd");
/** output format for times written to xml file */
private static final SimpleDateFormat c_outputTime
= new SimpleDateFormat("HH:mm");
static {
TimeZone utc = TimeZone.getTimeZone("UTC");
c_outputDate.setTimeZone(utc);
c_outputTime.setTimeZone(utc);
}
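// For reference, the XML produced below has this shape (values are illustrative):
//   <?xml version="1.0" encoding="UTF-8"?>
//   <changelog>
//     <entry>
//       <date>2006-11-05</date>
//       <time>21:16</time>
//       <author><![CDATA[kohsuke]]></author>
//       <file>
//         <name>core/src/Foo.java</name>
//         <revision>1.2</revision>
//         <prevrevision>1.1</prevrevision>
//       </file>
//       <msg><![CDATA[commit message]]></msg>
//     </entry>
//   </changelog>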
/**
* Print out the specified entries.
*
* @param output writer to which to send output.
* @param entries the entries to be written.
*/
public void printChangeLog(final PrintWriter output,
final CVSEntry[] entries) {
output.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
output.println("<changelog>");
for (int i = 0; i < entries.length; i++) {
final CVSEntry entry = entries[i];
printEntry(output, entry);
}
output.println("</changelog>");
output.flush();
output.close();
}
/**
* Print out an individual entry in changelog.
*
* @param entry the entry to print
* @param output writer to which to send output.
*/
private void printEntry(final PrintWriter output, final CVSEntry entry) {
output.println("\t<entry>");
output.println("\t\t<date>" + c_outputDate.format(entry.getDate())
+ "</date>");
output.println("\t\t<time>" + c_outputTime.format(entry.getDate())
+ "</time>");
output.println("\t\t<author><![CDATA[" + entry.getAuthor()
+ "]]></author>");
final Enumeration enumeration = entry.getFiles().elements();
while (enumeration.hasMoreElements()) {
final RCSFile file = (RCSFile) enumeration.nextElement();
output.println("\t\t<file>");
output.println("\t\t\t<name>" + file.getName() + "</name>");
output.println("\t\t\t<revision>" + file.getRevision()
+ "</revision>");
final String previousRevision = file.getPreviousRevision();
if (previousRevision != null) {
output.println("\t\t\t<prevrevision>" + previousRevision
+ "</prevrevision>");
}
if(file.isDead())
output.println("\t\t\t<dead />");
output.println("\t\t</file>");
}
output.println("\t\t<msg><![CDATA[" + entry.getComment() + "]]></msg>");
output.println("\t</entry>");
}
}

View File

@ -0,0 +1,93 @@
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import org.apache.tools.ant.BuildException;
/**
* Represents a CVS user with a userID and a full name.
*
* @version $Revision$ $Date$
*/
public class CvsUser {
/** The user's Id */
private String m_userID;
/** The user's full name */
private String m_displayName;
/**
* Set the user's full name.
*
* @param displayName the user's full name
*/
public void setDisplayname(final String displayName) {
m_displayName = displayName;
}
/**
* Set the user's id
*
* @param userID the user's new id value.
*/
public void setUserid(final String userID) {
m_userID = userID;
}
/**
* Get the user's id.
*
* @return The userID value
*/
String getUserID() {
return m_userID;
}
/**
* Get the user's full name
*
* @return the user's full name
*/
String getDisplayname() {
return m_displayName;
}
/**
* validate that this object is configured.
*
* @exception BuildException if the instance has not been correctly
*            configured.
*/
void validate() throws BuildException {
if (null == m_userID) {
final String message = "Username attribute must be set.";
throw new BuildException(message);
}
if (null == m_displayName) {
final String message =
"Displayname attribute must be set for userID " + m_userID;
throw new BuildException(message);
}
}
}

View File

@ -0,0 +1,71 @@
/*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
/**
* Represents a RCS File change.
*
* @version $Revision$ $Date$
*/
class RCSFile {
private String m_name;
private String m_revision;
private String m_previousRevision;
private boolean m_dead;
private String m_branch;
RCSFile(final String name,
final String revision,
final String previousRevision,
final String branch,
final boolean dead) {
m_name = name;
m_revision = revision;
if (!revision.equals(previousRevision)) {
m_previousRevision = previousRevision;
}
m_branch = branch;
m_dead = dead;
}
String getName() {
return m_name;
}
String getRevision() {
return m_revision;
}
String getPreviousRevision() {
return m_previousRevision;
}
boolean isDead() {
return m_dead;
}
/**
* Gets the name of this branch, if available.
*/
String getBranch() {
return m_branch;
}
}

View File

@ -0,0 +1,51 @@
/*
* Copyright 2002,2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import org.apache.tools.ant.taskdefs.LogOutputStream;
/**
* A dummy stream that just passes stuff to the parser.
*
* @version $Revision$ $Date$
*/
class RedirectingOutputStream
extends LogOutputStream {
private final ChangeLogParser parser;
/**
* Creates a new instance of this class.
*
* @param parser the parser to which output is sent.
*/
public RedirectingOutputStream(final ChangeLogParser parser) {
super(null, 0);
this.parser = parser;
}
/**
* Logs a line to the log system of ant.
*
* @param line the line to log.
*/
protected void processLine(final String line) {
parser.stdout(line);
}
}

View File

@ -0,0 +1,61 @@
/*
* Copyright 2002,2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package hudson.org.apache.tools.ant.taskdefs.cvslib;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.taskdefs.PumpStreamHandler;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
/**
* A dummy stream handler that just passes stuff to the parser.
*
* @version $Revision$ $Date$
*/
class RedirectingStreamHandler
extends PumpStreamHandler {
RedirectingStreamHandler(final ChangeLogParser parser) {
super(new RedirectingOutputStream(parser),
new ByteArrayOutputStream());
}
String getErrors() {
try {
final ByteArrayOutputStream error
= (ByteArrayOutputStream) getErr();
return error.toString("ASCII");
} catch (final Exception e) {
return null;
}
}
public void stop() {
super.stop();
try {
getErr().close();
getOut().close();
} catch (final IOException e) {
// plain impossible
throw new BuildException(e);
}
}
}

View File

@ -0,0 +1,70 @@
package hudson.scheduler;
import antlr.ANTLRException;
import antlr.LLkParser;
import antlr.ParserSharedInputState;
import antlr.SemanticException;
import antlr.Token;
import antlr.TokenBuffer;
import antlr.TokenStream;
import antlr.TokenStreamException;
/**
* @author Kohsuke Kawaguchi
*/
abstract class BaseParser extends LLkParser {
private static final int[] LOWER_BOUNDS = new int[] {0,0,1,0,0};
private static final int[] UPPER_BOUNDS = new int[] {59,23,31,12,7};
protected BaseParser(int i) {
super(i);
}
protected BaseParser(ParserSharedInputState parserSharedInputState, int i) {
super(parserSharedInputState, i);
}
protected BaseParser(TokenBuffer tokenBuffer, int i) {
super(tokenBuffer, i);
}
protected BaseParser(TokenStream tokenStream, int i) {
super(tokenStream, i);
}
protected long doRange(int start, int end, int step, int field) throws ANTLRException {
rangeCheck(start, field);
rangeCheck(end, field);
if (step <= 0)
error("step must be positive, but found " + step);
if (start>end)
error("You mean "+end+'-'+start+'?');
long bits = 0;
for (int i = start; i <= end; i += step) {
bits |= 1L << i;
}
return bits;
}
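// For example (hypothetical call): doRange(10, 20, 2, 0) returns a mask with
// bits 10, 12, 14, 16, 18 and 20 set, i.e. "minutes 10-20/2" in crontab syntax.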
protected long doRange( int step, int field ) throws ANTLRException {
return doRange( LOWER_BOUNDS[field], UPPER_BOUNDS[field], step, field );
}
protected void rangeCheck(int value, int field) throws ANTLRException {
if( value<LOWER_BOUNDS[field] || UPPER_BOUNDS[field]<value ) {
error(value +" is an invalid value. Must be within "+
LOWER_BOUNDS[field]+" and "+UPPER_BOUNDS[field]);
}
}
private void error(String msg) throws TokenStreamException, SemanticException {
Token token = LT(0);
throw new SemanticException(
msg,
token.getFilename(),
token.getLine(),
token.getColumn()
);
}
}

View File

@ -0,0 +1,78 @@
package hudson.scheduler;
import antlr.ANTLRException;
import java.io.StringReader;
import java.util.Calendar;
/**
* Table for driving scheduled tasks.
*
* @author Kohsuke Kawaguchi
*/
public final class CronTab {
/**
* bits[0]: minutes
* bits[1]: hours
* bits[2]: days
* bits[3]: months
*
* false: not scheduled &lt;-&gt; true: scheduled
*/
final long[] bits = new long[4];
int dayOfWeek;
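// For example (hypothetical schedule), "30 4 * * 1" sets bit 30 in bits[0] (minute),
// bit 4 in bits[1] (hour), every valid bit in bits[2] and bits[3] (day/month),
// and bit 1 in dayOfWeek (Monday, since check() uses Calendar.DAY_OF_WEEK-1).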
public CronTab(String format) throws ANTLRException {
this(format,1);
}
public CronTab(String format, int line) throws ANTLRException {
CrontabLexer lexer = new CrontabLexer(new StringReader(format));
lexer.setLine(line);
CrontabParser parser = new CrontabParser(lexer);
parser.startRule(this);
if((dayOfWeek&(1<<7))!=0)
dayOfWeek |= 1; // copy bit 7 over to bit 0
}
/**
* Returns true if the given calendar matches
*/
boolean check(Calendar cal) {
if(!checkBits(bits[0],cal.get(Calendar.MINUTE)))
return false;
if(!checkBits(bits[1],cal.get(Calendar.HOUR_OF_DAY)))
return false;
if(!checkBits(bits[2],cal.get(Calendar.DAY_OF_MONTH)))
return false;
if(!checkBits(bits[3],cal.get(Calendar.MONTH)+1))
return false;
if(!checkBits(dayOfWeek,cal.get(Calendar.DAY_OF_WEEK)-1))
return false;
return true;
}
/**
* Returns true if n-th bit is on.
*/
private boolean checkBits(long bitMask, int n) {
return (bitMask|(1L<<n))==bitMask;
}
public String toString() {
return super.toString()+"["+
toString("minute",bits[0])+','+
toString("hour",bits[1])+','+
toString("dayOfMonth",bits[2])+','+
toString("month",bits[3])+','+
toString("dayOfWeek",dayOfWeek)+']';
}
private String toString(String key, long bit) {
return key+'='+Long.toHexString(bit);
}
}

View File

@ -0,0 +1,45 @@
package hudson.scheduler;
import antlr.ANTLRException;
import java.util.Calendar;
import java.util.Collection;
import java.util.Vector;
/**
* {@link CronTab} list (logically OR-ed).
*
* @author Kohsuke Kawaguchi
*/
public final class CronTabList {
private final Vector<CronTab> tabs;
public CronTabList(Collection<CronTab> tabs) {
this.tabs = new Vector<CronTab>(tabs);
}
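// Illustrative usage (hypothetical spec): CronTabList.create("# nightly\n0 2 * * *")
// produces a list whose check(Calendar) returns true only when the calendar
// reads 02:00, on any day.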
public synchronized boolean check(Calendar cal) {
for (CronTab tab : tabs) {
if(tab.check(cal))
return true;
}
return false;
}
public static CronTabList create(String format) throws ANTLRException {
Vector<CronTab> r = new Vector<CronTab>();
int lineNumber = 0;
for (String line : format.split("\\r?\\n")) {
lineNumber++;
line = line.trim();
if(line.length()==0 || line.startsWith("#"))
continue; // ignorable line
try {
r.add(new CronTab(line,lineNumber));
} catch (ANTLRException e) {
throw new ANTLRException("Invalid input: \""+line+"\": "+e.toString(),e);
}
}
return new CronTabList(r);
}
}

View File

@ -0,0 +1,5 @@
<html>
<body>
Classes that implement cron-like features
</body>
</html>

View File

@ -0,0 +1,80 @@
package hudson.scm;
import hudson.EnvVars;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import hudson.model.BuildListener;
import hudson.model.TaskListener;
import hudson.util.ArgumentListBuilder;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.util.HashMap;
import java.util.Map;
/**
* Common implementation between {@link CVSSCM} and {@link SubversionSCM}.
*
* @author Kohsuke Kawaguchi
*/
abstract class AbstractCVSFamilySCM implements SCM {
/**
* Invokes the command with the specified command line options and waits for its completion.
*
* @param dir
* if launching locally this is a local path, otherwise a remote path.
* @param out
* Receives output from the executed program.
*/
protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir, OutputStream out) throws IOException {
Map env = createEnvVarMap(true);
int r = launcher.launch(cmd.toCommandArray(),env,out,dir).join();
if(r!=0)
listener.fatalError(getDescriptor().getDisplayName()+" failed");
return r==0;
}
protected final boolean run(Launcher launcher, ArgumentListBuilder cmd, TaskListener listener, FilePath dir) throws IOException {
return run(launcher,cmd,listener,dir,listener.getLogger());
}
/**
*
* @param overrideOnly
* true to indicate that the returned map shall only contain
* properties that need to be overridden. This is for use with {@link Launcher}.
* false to indicate that the map should contain the complete environment.
* This is to invoke {@link Proc} directly.
*/
protected final Map createEnvVarMap(boolean overrideOnly) {
Map env = new HashMap();
if(!overrideOnly)
env.putAll(EnvVars.masterEnvVars);
buildEnvVars(env);
return env;
}
protected final boolean createEmptyChangeLog(File changelogFile, BuildListener listener, String rootTag) {
try {
FileWriter w = new FileWriter(changelogFile);
w.write("<"+rootTag +"/>");
w.close();
return true;
} catch (IOException e) {
e.printStackTrace(listener.error(e.getMessage()));
return false;
}
}
protected final String nullify(String s) {
if(s==null) return null;
if(s.trim().length()==0) return null;
return s;
}
}

View File

@ -0,0 +1,17 @@
package hudson.scm;
import hudson.model.Build;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
/**
* {@link ChangeLogParser} for CVS.
* @author Kohsuke Kawaguchi
*/
public class CVSChangeLogParser extends ChangeLogParser {
public CVSChangeLogSet parse(Build build, File changelogFile) throws IOException, SAXException {
return CVSChangeLogSet.parse(changelogFile);
}
}

View File

@ -0,0 +1,206 @@
package hudson.scm;
import org.apache.commons.digester.Digester;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Iterator;
import hudson.model.User;
import hudson.scm.CVSChangeLogSet.CVSChangeLog;
/**
* {@link ChangeLogSet} for CVS.
* @author Kohsuke Kawaguchi
*/
public final class CVSChangeLogSet extends ChangeLogSet<CVSChangeLog> {
private List<CVSChangeLog> logs;
public CVSChangeLogSet(List<CVSChangeLog> logs) {
this.logs = Collections.unmodifiableList(logs);
}
/**
* Returns the read-only list of changes.
*/
public List<CVSChangeLog> getLogs() {
return logs;
}
@Override
public boolean isEmptySet() {
return logs.isEmpty();
}
public Iterator<CVSChangeLog> iterator() {
return logs.iterator();
}
public static CVSChangeLogSet parse( java.io.File f ) throws IOException, SAXException {
Digester digester = new Digester();
ArrayList<CVSChangeLog> r = new ArrayList<CVSChangeLog>();
digester.push(r);
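// The rules below mirror the XML emitted by the Ant cvs changelog task, roughly:
//   <changelog><entry><date/><time/><author/><file><name/><revision/>
//   <prevrevision/><dead/></file><msg/></entry></changelog>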
digester.addObjectCreate("*/entry",CVSChangeLog.class);
digester.addBeanPropertySetter("*/entry/date");
digester.addBeanPropertySetter("*/entry/time");
digester.addBeanPropertySetter("*/entry/author","user");
digester.addBeanPropertySetter("*/entry/msg");
digester.addSetNext("*/entry","add");
digester.addObjectCreate("*/entry/file",File.class);
digester.addBeanPropertySetter("*/entry/file/name");
digester.addBeanPropertySetter("*/entry/file/revision");
digester.addBeanPropertySetter("*/entry/file/prevrevision");
digester.addCallMethod("*/entry/file/dead","setDead");
digester.addSetNext("*/entry/file","addFile");
digester.parse(f);
// merge duplicate entries. The Ant task sometimes seems to report duplicate entries.
for(int i=r.size()-1; i>=0; i--) {
CVSChangeLog log = r.get(i);
boolean merged = false;
for(int j=0;j<i;j++) {
CVSChangeLog c = r.get(j);
if(c.canBeMergedWith(log)) {
c.merge(log);
merged = true;
break;
}
}
if(merged)
r.remove(log);
}
return new CVSChangeLogSet(r);
}
/**
* In-memory representation of CVS Changelog.
*/
public static class CVSChangeLog extends ChangeLogSet.Entry {
private String date;
private String time;
private User author;
private String msg;
private final List<File> files = new ArrayList<File>();
/**
* Checks if two {@link CVSChangeLog} entries can be merged.
* This is to work around the duplicate entry problems.
*/
public boolean canBeMergedWith(CVSChangeLog that) {
if(!this.date.equals(that.date))
return false;
if(!this.time.equals(that.time)) // TODO: perhaps check this loosely?
return false;
if(this.author==null || that.author==null || !this.author.equals(that.author))
return false;
if(!this.msg.equals(that.msg))
return false;
return true;
}
public void merge(CVSChangeLog that) {
this.files.addAll(that.files);
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public String getTime() {
return time;
}
public void setTime(String time) {
this.time = time;
}
public User getAuthor() {
return author;
}
public void setUser(String author) {
this.author = User.get(author);
}
public String getUser() {// digester wants read/write property, even though it never reads. Duh.
return author.getDisplayName();
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public void addFile( File f ) {
files.add(f);
}
public List<File> getFiles() {
return files;
}
}
public static class File {
private String name;
private String revision;
private String prevrevision;
private boolean dead;
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getRevision() {
return revision;
}
public void setRevision(String revision) {
this.revision = revision;
}
public String getPrevrevision() {
return prevrevision;
}
public void setPrevrevision(String prevrevision) {
this.prevrevision = prevrevision;
}
public boolean isDead() {
return dead;
}
public void setDead() {
this.dead = true;
}
public EditType getEditType() {
// see issue #73. Can't do much better right now
if(dead)
return EditType.DELETE;
if(revision.equals("1.1"))
return EditType.ADD;
return EditType.EDIT;
}
}
}

View File

@ -0,0 +1,873 @@
package hudson.scm;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import hudson.Util;
import static hudson.Util.fixEmpty;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Hudson;
import hudson.model.ModelObject;
import hudson.model.Project;
import hudson.model.Result;
import hudson.model.StreamBuildListener;
import hudson.model.TaskListener;
import hudson.org.apache.tools.ant.taskdefs.cvslib.ChangeLogTask;
import hudson.util.ArgumentListBuilder;
import hudson.util.ForkOutputStream;
import hudson.util.FormFieldValidator;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.taskdefs.Expand;
import org.apache.tools.zip.ZipEntry;
import org.apache.tools.zip.ZipOutputStream;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.io.StringWriter;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.Set;
import java.util.HashSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* CVS.
*
* <p>
* I couldn't call this class "CVS" because that would cause the view folder name
* to collide with CVS control files.
*
* @author Kohsuke Kawaguchi
*/
public class CVSSCM extends AbstractCVSFamilySCM {
/**
* CVSSCM connection string.
*/
private String cvsroot;
/**
* Module names.
*
* This could be a whitespace-separated list of multiple modules.
*/
private String module;
/**
* Branch to build. Null to indicate the trunk.
*/
private String branch;
private String cvsRsh;
private boolean canUseUpdate;
/**
* True to avoid creating a sub-directory inside the workspace.
* (Works only when there's just one module.)
*/
private boolean flatten;
public CVSSCM(String cvsroot, String module,String branch,String cvsRsh,boolean canUseUpdate, boolean flatten) {
this.cvsroot = cvsroot;
this.module = module.trim();
this.branch = nullify(branch);
this.cvsRsh = nullify(cvsRsh);
this.canUseUpdate = canUseUpdate;
this.flatten = flatten && module.indexOf(' ')==-1;
}
public String getCvsRoot() {
return cvsroot;
}
/**
* If there are multiple modules, return the module directory of the first one.
* @param workspace
*/
public FilePath getModuleRoot(FilePath workspace) {
if(flatten)
return workspace;
int idx = module.indexOf(' ');
if(idx>=0) return workspace.child(module.substring(0,idx));
else return workspace.child(module);
}
public ChangeLogParser createChangeLogParser() {
return new CVSChangeLogParser();
}
public String getAllModules() {
return module;
}
public String getBranch() {
return branch;
}
public String getCvsRsh() {
return cvsRsh;
}
public boolean getCanUseUpdate() {
return canUseUpdate;
}
public boolean isFlatten() {
return flatten;
}
public boolean pollChanges(Project project, Launcher launcher, FilePath dir, TaskListener listener) throws IOException {
List<String> changedFiles = update(true, launcher, dir, listener);
return changedFiles!=null && !changedFiles.isEmpty();
}
public boolean checkout(Build build, Launcher launcher, FilePath dir, BuildListener listener, File changelogFile) throws IOException {
List<String> changedFiles = null; // files that were affected by the update. null if this is a fresh checkout
if(canUseUpdate && isUpdatable(dir.getLocal())) {
changedFiles = update(false,launcher,dir,listener);
if(changedFiles==null)
return false; // failed
} else {
dir.deleteContents();
ArgumentListBuilder cmd = new ArgumentListBuilder();
cmd.add("cvs","-Q","-z9","-d",cvsroot,"co");
if(branch!=null)
cmd.add("-r",branch);
if(flatten)
cmd.add("-d",dir.getName());
cmd.addTokenized(module);
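// the assembled command line looks roughly like (illustrative values):
//   cvs -Q -z9 -d :pserver:user@host:/cvsroot co [-r branch] [-d wsname] module1 module2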
if(!run(launcher,cmd,listener, flatten ? dir.getParent() : dir))
return false;
}
// archive the workspace to support later tagging
// TODO: doing this partially remotely would be faster
File archiveFile = getArchiveFile(build);
ZipOutputStream zos = new ZipOutputStream(archiveFile);
if(flatten) {
archive(build.getProject().getWorkspace().getLocal(), module, zos);
} else {
StringTokenizer tokens = new StringTokenizer(module);
while(tokens.hasMoreTokens()) {
String m = tokens.nextToken();
archive(new File(build.getProject().getWorkspace().getLocal(),m),m,zos);
}
}
zos.close();
// contribute the tag action
build.getActions().add(new TagAction(build));
return calcChangeLog(build, changedFiles, changelogFile, listener);
}
/**
* Returns the file name used to archive the build.
*/
private static File getArchiveFile(Build build) {
return new File(build.getRootDir(),"workspace.zip");
}
private void archive(File dir,String relPath,ZipOutputStream zos) throws IOException {
Set<String> knownFiles = new HashSet<String>();
// see http://www.monkey.org/openbsd/archive/misc/9607/msg00056.html for what Entries.Log is for
parseCVSEntries(new File(dir,"CVS/Entries"),knownFiles);
parseCVSEntries(new File(dir,"CVS/Entries.Log"),knownFiles);
parseCVSEntries(new File(dir,"CVS/Entries.Extra"),knownFiles);
boolean hasCVSdirs = !knownFiles.isEmpty();
knownFiles.add("CVS");
File[] files = dir.listFiles();
if(files==null)
throw new IOException("No such directory exists. Did you specify the correct branch?: "+dir);
for( File f : files ) {
String name = relPath+'/'+f.getName();
if(f.isDirectory()) {
if(hasCVSdirs && !knownFiles.contains(f.getName())) {
// not controlled in CVS. Skip.
// but also make sure that we archive CVS/*, which doesn't have CVS/CVS
continue;
}
archive(f,name,zos);
} else {
if(!dir.getName().equals("CVS"))
// we only need to archive CVS control files, not the actual workspace files
continue;
zos.putNextEntry(new ZipEntry(name));
FileInputStream fis = new FileInputStream(f);
Util.copyStream(fis,zos);
fis.close();
zos.closeEntry();
}
}
}
/**
* Parses the CVS/Entries file and adds file/directory names to the list.
*/
private void parseCVSEntries(File entries, Set<String> knownFiles) throws IOException {
if(!entries.exists())
return;
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(entries)));
String line;
while((line=in.readLine())!=null) {
String[] tokens = line.split("/+");
if(tokens==null || tokens.length<2) continue; // invalid format
knownFiles.add(tokens[1]);
}
in.close();
}
/**
* Updates the workspace and locates the changes.
*
* @return
* List of affected file names, relative to the workspace directory.
* Null if the operation failed.
*/
public List<String> update(boolean dryRun, Launcher launcher, FilePath workspace, TaskListener listener) throws IOException {
List<String> changedFileNames = new ArrayList<String>(); // file names relative to the workspace
ArgumentListBuilder cmd = new ArgumentListBuilder();
cmd.add("cvs","-q","-z9");
if(dryRun)
cmd.add("-n");
cmd.add("update","-PdC");
if(flatten) {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
if(!run(launcher,cmd,listener,workspace,
new ForkOutputStream(baos,listener.getLogger())))
return null;
parseUpdateOutput("",baos, changedFileNames);
} else {
StringTokenizer tokens = new StringTokenizer(module);
while(tokens.hasMoreTokens()) {
String moduleName = tokens.nextToken();
// capture the output during update
ByteArrayOutputStream baos = new ByteArrayOutputStream();
if(!run(launcher,cmd,listener,
new FilePath(workspace, moduleName),
new ForkOutputStream(baos,listener.getLogger())))
return null;
// we'll run one "cvs log" command with workspace as the base,
// so use path names that are relative to moduleName.
parseUpdateOutput(moduleName+'/',baos, changedFileNames);
}
}
return changedFileNames;
}
// see http://www.network-theory.co.uk/docs/cvsmanual/cvs_153.html for the output format.
// we don't care about '?' lines because those files are not in the repository
private static final Pattern UPDATE_LINE = Pattern.compile("[UPARMC] (.+)");
private static final Pattern REMOVAL_LINE = Pattern.compile("cvs (server|update): (.+) is no longer in the repository");
private static final Pattern NEWDIRECTORY_LINE = Pattern.compile("cvs server: New directory `(.+)' -- ignored");
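// Illustrative lines these patterns are meant to match:
//   "M src/Foo.java"                                          -> UPDATE_LINE
//   "cvs update: src/Bar.java is no longer in the repository" -> REMOVAL_LINE
//   "cvs server: New directory `src/baz' -- ignored"          -> NEWDIRECTORY_LINE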
/**
* Parses the output from "cvs update" and lists the files that might have been changed.
*
* @param result
* list of file names whose changelog should be checked. This may include files
* that are no longer present. The path names are relative to the workspace,
* hence "String", not {@link File}.
*/
private void parseUpdateOutput(String baseName, ByteArrayOutputStream output, List<String> result) throws IOException {
BufferedReader in = new BufferedReader(new InputStreamReader(
new ByteArrayInputStream(output.toByteArray())));
String line;
while((line=in.readLine())!=null) {
Matcher matcher = UPDATE_LINE.matcher(line);
if(matcher.matches()) {
result.add(baseName+matcher.group(1));
continue;
}
matcher= REMOVAL_LINE.matcher(line);
if(matcher.matches()) {
result.add(baseName+matcher.group(2));
continue;
}
// this line is added in an attempt to capture newly created directories in the repository,
// but it turns out that this line always hits if the workspace is missing a directory
// that the server has, even if that directory contains nothing in it
//matcher= NEWDIRECTORY_LINE.matcher(line);
//if(matcher.matches()) {
// result.add(baseName+matcher.group(1));
//}
}
}
/**
* Returns true if we can use "cvs update" instead of "cvs checkout"
*/
private boolean isUpdatable(File dir) {
if(flatten) {
return isUpdatableModule(dir);
} else {
StringTokenizer tokens = new StringTokenizer(module);
while(tokens.hasMoreTokens()) {
File module = new File(dir,tokens.nextToken());
if(!isUpdatableModule(module))
return false;
}
return true;
}
}
private boolean isUpdatableModule(File module) {
File cvs = new File(module,"CVS");
if(!cvs.exists())
return false;
// check cvsroot
if(!checkContents(new File(cvs,"Root"),cvsroot))
return false;
if(branch!=null) {
if(!checkContents(new File(cvs,"Tag"),'T'+branch))
return false;
} else {
if(new File(cvs,"Tag").exists())
return false;
}
return true;
}
/**
* Returns true if the content of the file is equal to the given string.
*
* @return false in all the other cases.
*/
private boolean checkContents(File file, String contents) {
try {
Reader r = new FileReader(file);
try {
String s = new BufferedReader(r).readLine();
if (s == null) return false;
return s.trim().equals(contents.trim());
} finally {
r.close();
}
} catch (IOException e) {
return false;
}
}
/**
* Computes the changelog into an XML file.
*
* <p>
* When we update the workspace, we compute the changelog from the update output
* to make it faster. In the general case, we fall back to the slower approach
* where we check all files in the workspace.
*
* @param changedFiles
* Files whose changelog should be checked for updates.
* This is provided if the previous operation was an update, otherwise null,
* which means we have to fall back to the default slow computation.
*/
private boolean calcChangeLog(Build build, List<String> changedFiles, File changelogFile, final BuildListener listener) {
if(build.getPreviousBuild()==null || (changedFiles!=null && changedFiles.isEmpty())) {
// nothing to compare against, or no changes
// (note that changedFiles==null means fallback, so we have to run cvs log.)
listener.getLogger().println("$ no changes detected");
return createEmptyChangeLog(changelogFile,listener, "changelog");
}
listener.getLogger().println("$ computing changelog");
final StringWriter errorOutput = new StringWriter();
final boolean[] hadError = new boolean[1];
ChangeLogTask task = new ChangeLogTask() {
public void log(String msg, int msgLevel) {
// send error to listener. This seems like the route in which the changelog task
// sends output
if(msgLevel==org.apache.tools.ant.Project.MSG_ERR) {
hadError[0] = true;
errorOutput.write(msg);
errorOutput.write('\n');
}
}
};
task.setProject(new org.apache.tools.ant.Project());
File baseDir = build.getProject().getWorkspace().getLocal();
task.setDir(baseDir);
if(DESCRIPTOR.getCvspassFile().length()!=0)
task.setPassfile(new File(DESCRIPTOR.getCvspassFile()));
task.setCvsRoot(cvsroot);
task.setCvsRsh(cvsRsh);
task.setFailOnError(true);
task.setDestfile(changelogFile);
task.setBranch(branch);
task.setStart(build.getPreviousBuild().getTimestamp().getTime());
task.setEnd(build.getTimestamp().getTime());
if(changedFiles!=null) {
// if the directory doesn't exist, cvs changelog will die, so filter them out.
// this means we'll lose the log of those changes
for (String filePath : changedFiles) {
if(new File(baseDir,filePath).getParentFile().exists())
task.addFile(filePath);
}
} else {
// fallback
if(!flatten)
task.setPackage(module);
}
try {
task.execute();
if(hadError[0]) {
// a non-fatal error must have occurred, such as a cvs changelog parsing error.
listener.getLogger().print(errorOutput);
}
return true;
} catch( BuildException e ) {
// capture output from the task for diagnosis
listener.getLogger().print(errorOutput);
// then report an error
PrintWriter w = listener.error(e.getMessage());
w.println("Working directory is "+baseDir);
e.printStackTrace(w);
return false;
} catch( RuntimeException e ) {
// a user reported an NPE inside the changeLog task.
// we don't want a bug in Ant to prevent a build.
e.printStackTrace(listener.error(e.getMessage()));
return true; // so record the message but continue
}
}
public DescriptorImpl getDescriptor() {
return DESCRIPTOR;
}
public void buildEnvVars(Map env) {
if(cvsRsh!=null)
env.put("CVS_RSH",cvsRsh);
String cvspass = DESCRIPTOR.getCvspassFile();
if(cvspass.length()!=0)
env.put("CVS_PASSFILE",cvspass);
}
static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<SCM> implements ModelObject {
DescriptorImpl() {
super(CVSSCM.class);
}
public String getDisplayName() {
return "CVS";
}
public SCM newInstance(StaplerRequest req) {
return new CVSSCM(
req.getParameter("cvs_root"),
req.getParameter("cvs_module"),
req.getParameter("cvs_branch"),
req.getParameter("cvs_rsh"),
req.getParameter("cvs_use_update")!=null,
req.getParameter("cvs_legacy")==null
);
}
public String getCvspassFile() {
String value = (String)getProperties().get("cvspass");
if(value==null)
value = "";
return value;
}
public void setCvspassFile(String value) {
getProperties().put("cvspass",value);
save();
}
/**
* Gets the URL that shows the diff.
*/
public String getDiffURL(String cvsRoot, String pathName, String oldRev, String newRev) {
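// The stored template is expected to use %%P for the path, %%r for the old
// revision and %%R for the new revision, e.g. (hypothetical)
//   http://viewvc.example.org/%%P?r1=%%r&r2=%%R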
Object value = getProperties().get("repository-browser.diff." + cvsRoot);
if(value==null) return null;
return value.toString().replaceAll("%%P",pathName).replace("%%r",oldRev).replace("%%R",newRev);
}
public boolean configure( HttpServletRequest req ) {
setCvspassFile(req.getParameter("cvs_cvspass"));
Map<String,Object> properties = getProperties();
int i=0;
while(true) {
String root = req.getParameter("cvs_repobrowser_cvsroot" + i);
if(root==null) break;
setBrowser(req.getParameter("cvs_repobrowser"+i), properties, root, "repository-browser.");
setBrowser(req.getParameter("cvs_repobrowser_diff"+i), properties, root, "repository-browser.diff.");
i++;
}
save();
return true;
}
private void setBrowser(String key, Map<String, Object> properties, String root, String prefix) {
String value = Util.nullify(key);
if(value==null) {
properties.remove(prefix+root);
} else {
properties.put(prefix+root,value);
}
}
public Map<String,Object> getProperties() {
return super.getProperties();
}
//
// web methods
//
public void doCvsPassCheck(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
// this method can be used to check if a file exists anywhere in the file system,
// so it should be protected.
new FormFieldValidator(req,rsp,true) {
protected void check() throws IOException, ServletException {
String v = fixEmpty(request.getParameter("value"));
if(v==null) {
// default.
ok();
} else {
File cvsPassFile = new File(v);
if(cvsPassFile.exists()) {
ok();
} else {
error("No such file exists");
}
}
}
}.process();
}
/**
* Displays "cvs --version" for trouble shooting.
*/
public void doVersion(StaplerRequest req, StaplerResponse rsp) throws IOException {
rsp.setContentType("text/plain");
Proc proc = Hudson.getInstance().createLauncher(TaskListener.NULL).launch(
new String[]{"cvs", "--version"}, new String[0], rsp.getOutputStream(), FilePath.RANDOM);
proc.join();
}
/**
* Checks the entry to the CVSROOT field.
* <p>
* Also checks if .cvspass file contains the entry for this.
*/
public void doCvsrootCheck(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
new FormFieldValidator(req,rsp,false) {
protected void check() throws IOException, ServletException {
String v = fixEmpty(request.getParameter("value"));
if(v==null) {
error("CVSROOT is mandatory");
return;
}
// CVSROOT format isn't really that well defined. So it's hard to check this rigorously.
if(v.startsWith(":pserver") || v.startsWith(":ext")) {
if(!CVSROOT_PSERVER_PATTERN.matcher(v).matches()) {
error("Invalid CVSROOT string");
return;
}
// I can't really test if the machine name exists, either.
// some cvs clients, such as a SOCKS-enabled cvs, can resolve host names that Hudson might not
// be able to. If :ext is used, all bets are off anyway.
}
// check .cvspass file to see if it has entry.
// CVS handles authentication only if it's pserver.
if(v.startsWith(":pserver")) {
String cvspass = getCvspassFile();
File passfile;
if(cvspass.equals("")) {
passfile = new File(new File(System.getProperty("user.home")),".cvspass");
} else {
passfile = new File(cvspass);
}
if(passfile.exists()) {
// It's possible that we failed to locate the correct .cvspass file location,
// so don't report an error if we couldn't locate this file.
//
// if this is explicitly specified, then our system config page should have
// reported an error.
if(!scanCvsPassFile(passfile, v)) {
error("It doesn't look like this CVSROOT has its password set." +
" Would you like to set it now?");
return;
}
}
}
// all tests passed so far
ok();
}
}.process();
}
/**
* Checks if the given pserver CVSROOT value exists in the pass file.
*/
private boolean scanCvsPassFile(File passfile, String cvsroot) throws IOException {
cvsroot += ' ';
String cvsroot2 = "/1 "+cvsroot; // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=5006835
BufferedReader in = new BufferedReader(new FileReader(passfile));
try {
String line;
while((line=in.readLine())!=null) {
// "/1 " version always have the port number in it, so examine a much with
// default port 2401 left out
int portIndex = line.indexOf(":2401/");
String line2 = "";
if(portIndex>=0)
line2 = line.substring(0,portIndex+1)+line.substring(portIndex+5); // leave '/'
if(line.startsWith(cvsroot) || line.startsWith(cvsroot2) || line2.startsWith(cvsroot2))
return true;
}
return false;
} finally {
in.close();
}
}
private static final Pattern CVSROOT_PSERVER_PATTERN =
Pattern.compile(":(ext|pserver):[^@:]+@[^:]+:(\\d+:)?.+");
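// e.g. (illustrative) ":pserver:anoncvs@cvs.example.org:/cvsroot" and
// ":ext:user@host:2401:/cvsroot" both match the pattern above.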
/**
* Runs cvs login command.
*
* TODO: this apparently doesn't work. Probably related to the fact that
* cvs does some tty magic to disable echo-back or whatever.
*/
public void doPostPassword(StaplerRequest req, StaplerResponse rsp) throws IOException {
if(!Hudson.adminCheck(req,rsp))
return;
String cvsroot = req.getParameter("cvsroot");
String password = req.getParameter("password");
if(cvsroot==null || password==null) {
rsp.setStatus(HttpServletResponse.SC_BAD_REQUEST);
return;
}
rsp.setContentType("text/plain");
Proc proc = Hudson.getInstance().createLauncher(TaskListener.NULL).launch(
new String[]{"cvs", "-d",cvsroot,"login"}, new String[0],
new ByteArrayInputStream((password+"\n").getBytes()),
rsp.getOutputStream());
proc.join();
}
}
/**
* Action for a build that performs the tagging.
*/
public final class TagAction implements Action {
private final Build build;
/**
* If non-null, that means the build is already tagged.
*/
private String tagName;
/**
* If non-null, that means the tagging is in progress
* (asynchronously.)
*/
private transient TagWorkerThread workerThread;
public TagAction(Build build) {
this.build = build;
}
public String getIconFileName() {
return "save.gif";
}
public String getDisplayName() {
return "Tag this build";
}
public String getUrlName() {
return "tagBuild";
}
public String getTagName() {
return tagName;
}
public TagWorkerThread getWorkerThread() {
return workerThread;
}
public Build getBuild() {
return build;
}
public void doIndex(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
req.setAttribute("build",build);
req.getView(this,chooseAction()).forward(req,rsp);
}
private synchronized String chooseAction() {
if(tagName!=null)
return "alreadyTagged.jelly";
if(workerThread!=null)
return "inProgress.jelly";
return "tagForm.jelly";
}
/**
* Invoked to actually tag the workspace.
*/
public synchronized void doSubmit(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
String name = req.getParameter("name");
if(name==null || name.length()==0) {
// invalid tag name
doIndex(req,rsp);
return;
}
if(workerThread==null) {
workerThread = new TagWorkerThread(name);
workerThread.start();
}
doIndex(req,rsp);
}
/**
* Clears the error status.
*/
public synchronized void doClearError(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
if(workerThread!=null && !workerThread.isAlive())
workerThread = null;
doIndex(req,rsp);
}
public final class TagWorkerThread extends Thread {
private final String tagName;
// StringWriter is synchronized
private final StringWriter log = new StringWriter();
public TagWorkerThread(String tagName) {
this.tagName = tagName;
}
public String getLog() {
// this method can be invoked from another thread.
return log.toString();
}
public String getTagName() {
return tagName;
}
public void run() {
BuildListener listener = new StreamBuildListener(log);
Result result = Result.FAILURE;
File destdir = null;
listener.started();
try {
destdir = Util.createTempDir();
// unzip the archive
listener.getLogger().println("expanding the workspace archive into "+destdir);
Expand e = new Expand();
e.setProject(new org.apache.tools.ant.Project());
e.setDest(destdir);
e.setSrc(getArchiveFile(build));
e.setTaskType("unzip");
e.execute();
// run cvs tag command
listener.getLogger().println("tagging the workspace");
StringTokenizer tokens = new StringTokenizer(CVSSCM.this.module);
while(tokens.hasMoreTokens()) {
String m = tokens.nextToken();
ArgumentListBuilder cmd = new ArgumentListBuilder();
cmd.add("cvs","tag","-R",tagName);
if(!CVSSCM.this.run(new Launcher(listener),cmd,listener,new FilePath(destdir).child(m))) {
listener.getLogger().println("tagging failed");
return;
}
}
// completed successfully
synchronized(TagAction.this) {
TagAction.this.tagName = this.tagName;
TagAction.this.workerThread = null;
}
build.save();
} catch (Throwable e) {
e.printStackTrace(listener.fatalError(e.getMessage()));
} finally {
try {
if(destdir!=null) {
listener.getLogger().println("cleaning up "+destdir);
Util.deleteRecursive(destdir);
}
} catch (IOException e) {
e.printStackTrace(listener.fatalError(e.getMessage()));
}
listener.finished(result);
}
}
}
}
}

View File

@ -0,0 +1,20 @@
package hudson.scm;
import hudson.model.Build;
import hudson.scm.ChangeLogSet.Entry;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
/**
* Encapsulates the file format of the changelog.
*
* Instances should be stateless, but are
* persisted as a part of {@link Build}.
*
* @author Kohsuke Kawaguchi
*/
public abstract class ChangeLogParser {
public abstract ChangeLogSet<? extends Entry> parse(Build build, File changelogFile) throws IOException, SAXException;
}

View File

@ -0,0 +1,63 @@
package hudson.scm;
import hudson.model.User;
import java.util.Collections;
/**
* Represents SCM change list.
*
* Use the "index" view of this object to render the changeset detail page,
* and use the "digest" view of this object to render the summary page.
*
* @author Kohsuke Kawaguchi
*/
public abstract class ChangeLogSet<T extends ChangeLogSet.Entry> implements Iterable<T> {
/**
* Returns true if there's no change.
*/
public abstract boolean isEmptySet();
/**
* Constant instance that represents no changes.
*/
public static final ChangeLogSet<? extends Entry> EMPTY = new CVSChangeLogSet(Collections.EMPTY_LIST);
public static abstract class Entry {
public abstract String getMsg();
/**
* The user who made this change.
*
* @return
* never null.
*/
public abstract User getAuthor();
/**
* Message escaped for HTML
*/
public String getMsgEscaped() {
String msg = getMsg();
StringBuffer buf = new StringBuffer(msg.length()+64);
for( int i=0; i<msg.length(); i++ ) {
char ch = msg.charAt(i);
if(ch=='\n')
buf.append("<br>");
else
if(ch=='<')
buf.append("&lt;");
else
if(ch=='&')
buf.append("&amp;");
else
if(ch==' ')
buf.append("&nbsp;");
else
buf.append(ch);
}
return buf.toString();
}
}
}

View File

@ -0,0 +1,28 @@
package hudson.scm;
/**
* Designates the SCM operation.
*
* @author Kohsuke Kawaguchi
*/
public final class EditType {
private String name;
private String description;
public EditType(String name, String description) {
this.name = name;
this.description = description;
}
public String getName() {
return name;
}
public String getDescription() {
return description;
}
public static final EditType ADD = new EditType("add","The file was added");
public static final EditType EDIT = new EditType("edit","The file was modified");
public static final EditType DELETE = new EditType("delete","The file was removed");
}

View File

@ -0,0 +1,17 @@
package hudson.scm;
import hudson.model.Build;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
/**
* {@link ChangeLogParser} for no SCM.
* @author Kohsuke Kawaguchi
*/
public class NullChangeLogParser extends ChangeLogParser {
public ChangeLogSet parse(Build build, File changelogFile) throws IOException, SAXException {
return ChangeLogSet.EMPTY;
}
}

View File

@ -0,0 +1,57 @@
package hudson.scm;
import hudson.FilePath;
import hudson.Launcher;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import hudson.model.TaskListener;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.kohsuke.stapler.StaplerRequest;
/**
* No {@link SCM}.
*
* @author Kohsuke Kawaguchi
*/
public class NullSCM implements SCM {
public boolean pollChanges(Project project, Launcher launcher, FilePath dir, TaskListener listener) throws IOException {
// no change
return false;
}
public boolean checkout(Build build, Launcher launcher, FilePath remoteDir, BuildListener listener, File changeLogFile) throws IOException {
return true;
}
public Descriptor<SCM> getDescriptor() {
return DESCRIPTOR;
}
public void buildEnvVars(Map env) {
// noop
}
public FilePath getModuleRoot(FilePath workspace) {
return workspace;
}
public ChangeLogParser createChangeLogParser() {
return new NullChangeLogParser();
}
static final Descriptor<SCM> DESCRIPTOR = new Descriptor<SCM>(NullSCM.class) {
public String getDisplayName() {
return "None";
}
public SCM newInstance(StaplerRequest req) {
return new NullSCM();
}
};
}

View File

@ -0,0 +1,86 @@
package hudson.scm;
import hudson.FilePath;
import hudson.Launcher;
import hudson.ExtensionPoint;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Describable;
import hudson.model.Project;
import hudson.model.TaskListener;
import java.io.File;
import java.io.IOException;
import java.util.Map;
/**
* Captures the SCM configuration information of a project.
*
* <p>
* To register a custom {@link SCM} implementation from a plugin,
* add it to {@link SCMS#SCMS}.
*
* @author Kohsuke Kawaguchi
*/
public interface SCM extends Describable<SCM>, ExtensionPoint {
/**
* Checks if there has been any changes to this module in the repository.
*
* TODO: we need to figure out a better way to communicate an error back,
* so that we won't keep retrying the same node (for example a slave might be down.)
*
* @param project
* The project to check for updates
* @param launcher
* Abstraction of the machine where the polling will take place.
* @param workspace
* The workspace directory that contains baseline files.
* @param listener
* Logs during the polling should be sent here.
*
* @return true
* if the change is detected.
*/
boolean pollChanges(Project project, Launcher launcher, FilePath workspace, TaskListener listener) throws IOException;
/**
* Obtains a fresh workspace of the module(s) into the specified directory
* of the specified machine.
*
* <p>
* The "update" operation can be performed instead of a fresh checkout if
* feasible.
*
* <p>
* This operation should also capture the information necessary to tag the workspace later.
*
* @param launcher
* Abstracts away the machine on which the files will be checked out.
* @param workspace
* a directory to check the source code out into. May contain left-over files
* from the previous build.
* @param changelogFile
* Upon a successful return, this file should capture the changelog.
* @return
*      false if the operation fails. The error should be reported to the listener.
*      true otherwise.
*/
boolean checkout(Build build, Launcher launcher, FilePath workspace, BuildListener listener, File changelogFile) throws IOException;
/**
* Adds environmental variables for the builds to the given map.
*/
void buildEnvVars(Map env);
/**
* Gets the top directory of the checked out module.
* @param workspace
*/
FilePath getModuleRoot(FilePath workspace);
/**
* The returned object will be used to parse <tt>changelog.xml</tt>.
*/
ChangeLogParser createChangeLogParser();
}

View File

@ -0,0 +1,16 @@
package hudson.scm;
import hudson.model.Descriptor;
import java.util.List;
/**
* @author Kohsuke Kawaguchi
*/
public class SCMS {
/**
* List of all installed SCMs.
*/
public static final List<Descriptor<SCM>> SCMS =
Descriptor.toList(NullSCM.DESCRIPTOR,CVSSCM.DESCRIPTOR,SubversionSCM.DESCRIPTOR);
}

View File

@ -0,0 +1,43 @@
package hudson.scm;
import hudson.model.Build;
import hudson.scm.SubversionChangeLogSet.LogEntry;
import hudson.scm.SubversionChangeLogSet.Path;
import org.apache.commons.digester.Digester;
import org.xml.sax.SAXException;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
/**
* {@link ChangeLogParser} for Subversion.
*
* @author Kohsuke Kawaguchi
*/
public class SubversionChangeLogParser extends ChangeLogParser {
public SubversionChangeLogSet parse(Build build, File changelogFile) throws IOException, SAXException {
// http://svn.collab.net/repos/svn/trunk/subversion/svn/schema/
Digester digester = new Digester();
ArrayList<LogEntry> r = new ArrayList<LogEntry>();
digester.push(r);
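// "svn log -v --xml" output has roughly this shape (illustrative excerpt):
//   <log>
//     <logentry revision="123">
//       <author>someone</author><date>2006-11-05T21:16:01Z</date>
//       <paths><path action="M">/trunk/foo</path></paths>
//       <msg>commit message</msg>
//     </logentry>
//   </log>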
digester.addObjectCreate("*/logentry", LogEntry.class);
digester.addSetProperties("*/logentry");
digester.addBeanPropertySetter("*/logentry/author","user");
digester.addBeanPropertySetter("*/logentry/date");
digester.addBeanPropertySetter("*/logentry/msg");
digester.addSetNext("*/logentry","add");
digester.addObjectCreate("*/logentry/paths/path", Path.class);
digester.addSetProperties("*/logentry/paths/path");
digester.addBeanPropertySetter("*/logentry/paths/path","value");
digester.addSetNext("*/logentry/paths/path","addPath");
digester.parse(changelogFile);
return new SubversionChangeLogSet(build,r);
}
}

View File

@ -0,0 +1,131 @@
package hudson.scm;
import hudson.model.Build;
import hudson.model.User;
import hudson.scm.SubversionChangeLogSet.LogEntry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Iterator;
/**
* {@link ChangeLogSet} for Subversion.
*
* @author Kohsuke Kawaguchi
*/
public final class SubversionChangeLogSet extends ChangeLogSet<LogEntry> {
private final List<LogEntry> logs;
private final Build build;
/**
* @GuardedBy this
*/
private Map<String,Integer> revisionMap;
/*package*/ SubversionChangeLogSet(Build build, List<LogEntry> logs) {
this.build = build;
this.logs = Collections.unmodifiableList(logs);
}
public boolean isEmptySet() {
return logs.isEmpty();
}
public List<LogEntry> getLogs() {
return logs;
}
public Iterator<LogEntry> iterator() {
return logs.iterator();
}
public synchronized Map<String,Integer> getRevisionMap() throws IOException {
if(revisionMap==null)
revisionMap = SubversionSCM.parseRevisionFile(build);
return revisionMap;
}
/**
* One commit.
*/
public static class LogEntry extends ChangeLogSet.Entry {
private int revision;
private User author;
private String date;
private String msg;
private List<Path> paths = new ArrayList<Path>();
public int getRevision() {
return revision;
}
public void setRevision(int revision) {
this.revision = revision;
}
public User getAuthor() {
return author;
}
public void setUser(String author) {
this.author = User.get(author);
}
public String getUser() {// digester wants read/write property, even though it never reads. Duh.
return author.getDisplayName();
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public String getMsg() {
return msg;
}
public void setMsg(String msg) {
this.msg = msg;
}
public void addPath( Path p ) {
paths.add(p);
}
public List<Path> getPaths() {
return paths;
}
}
public static class Path {
private char action;
private String value;
public void setAction(String action) {
this.action = action.charAt(0);
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public EditType getEditType() {
if( action=='A' )
return EditType.ADD;
if( action=='D' )
return EditType.DELETE;
return EditType.EDIT;
}
}
}

View File

@ -0,0 +1,532 @@
package hudson.scm;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Proc;
import hudson.Util;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import hudson.model.TaskListener;
import hudson.util.ArgumentListBuilder;
import hudson.util.FormFieldValidator;
import org.apache.commons.digester.Digester;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import org.xml.sax.SAXException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Subversion.
*
* Check http://svn.collab.net/repos/svn/trunk/subversion/svn/schema/ for
* various output formats.
*
* @author Kohsuke Kawaguchi
*/
public class SubversionSCM extends AbstractCVSFamilySCM {
private final String modules;
private boolean useUpdate;
private String username;
private String otherOptions;
SubversionSCM( String modules, boolean useUpdate, String username, String otherOptions ) {
StringBuilder normalizedModules = new StringBuilder();
StringTokenizer tokens = new StringTokenizer(modules);
while(tokens.hasMoreTokens()) {
if(normalizedModules.length()>0) normalizedModules.append(' ');
String m = tokens.nextToken();
if(m.endsWith("/"))
// the normalized name is always without the trailing '/'
m = m.substring(0,m.length()-1);
normalizedModules.append(m);
}
this.modules = normalizedModules.toString();
this.useUpdate = useUpdate;
this.username = nullify(username);
this.otherOptions = nullify(otherOptions);
}
public String getModules() {
return modules;
}
public boolean isUseUpdate() {
return useUpdate;
}
public String getUsername() {
return username;
}
public String getOtherOptions() {
return otherOptions;
}
private Collection<String> getModuleDirNames() {
List<String> dirs = new ArrayList<String>();
StringTokenizer tokens = new StringTokenizer(modules);
while(tokens.hasMoreTokens()) {
dirs.add(getLastPathComponent(tokens.nextToken()));
}
return dirs;
}
private boolean calcChangeLog(Build build, File changelogFile, Launcher launcher, BuildListener listener) throws IOException {
if(build.getPreviousBuild()==null) {
// nothing to compare against
return createEmptyChangeLog(changelogFile, listener, "log");
}
PrintStream logger = listener.getLogger();
Map<String,Integer> previousRevisions = parseRevisionFile(build.getPreviousBuild());
Map<String,Integer> thisRevisions = parseRevisionFile(build);
Map env = createEnvVarMap(true);
for( String module : getModuleDirNames() ) {
Integer prevRev = previousRevisions.get(module);
if(prevRev==null) {
logger.println("no revision recorded for "+module+" in the previous build");
continue;
}
Integer thisRev = thisRevisions.get(module);
if(thisRev!=null && thisRev.equals(prevRev)) {
logger.println("no change for "+module+" since the previous build");
continue;
}
String cmd = DESCRIPTOR.getSvnExe()+" log -v --xml --non-interactive -r "+(prevRev+1)+":BASE "+module;
OutputStream os = new BufferedOutputStream(new FileOutputStream(changelogFile));
try {
int r = launcher.launch(cmd,env,os,build.getProject().getWorkspace()).join();
if(r!=0) {
listener.fatalError("revision check failed");
// report the output
FileInputStream log = new FileInputStream(changelogFile);
try {
Util.copyStream(log,listener.getLogger());
} finally {
log.close();
}
return false;
}
} finally {
os.close();
}
}
return true;
}
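// Illustration (module name and revision invented): for a module directory "foo" whose
// previous build recorded revision 1200, and with the default "svn" executable, the
// command assembled above is
//     svn log -v --xml --non-interactive -r 1201:BASE foo
// i.e. everything committed after the previously built revision, up to the checked-out BASE.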
/*package*/ static Map<String,Integer> parseRevisionFile(Build build) throws IOException {
Map<String,Integer> revisions = new HashMap<String,Integer>(); // module -> revision
{// read the revision file of the last build
File file = getRevisionFile(build);
if(!file.exists())
// nothing to compare against
return revisions;
BufferedReader br = new BufferedReader(new FileReader(file));
String line;
while((line=br.readLine())!=null) {
int index = line.indexOf('/');
if(index<0) {
continue; // invalid line?
}
try {
revisions.put(line.substring(0,index), Integer.parseInt(line.substring(index+1)));
} catch (NumberFormatException e) {
// perhaps a corrupted line. ignore
}
}
}
return revisions;
}
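// Illustration (values invented): the file parsed above is the revision file written by
// checkout() below, one "<module dir name>/<revision>" pair per line, e.g.
//     foo/1200
//     bar/1187
// Lines without a '/' or without a numeric tail are silently skipped as corrupted.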
public boolean checkout(Build build, Launcher launcher, FilePath workspace, BuildListener listener, File changelogFile) throws IOException {
boolean result;
if(useUpdate && isUpdatable(workspace,listener)) {
result = update(launcher,workspace,listener);
if(!result)
return false;
} else {
workspace.deleteContents();
StringTokenizer tokens = new StringTokenizer(modules);
while(tokens.hasMoreTokens()) {
ArgumentListBuilder cmd = new ArgumentListBuilder();
cmd.add(DESCRIPTOR.getSvnExe(),"co","-q","--non-interactive");
if(username!=null)
cmd.add("--username",username);
if(otherOptions!=null)
cmd.add(Util.tokenize(otherOptions));
cmd.add(tokens.nextToken());
result = run(launcher,cmd,listener,workspace);
if(!result)
return false;
}
}
// write out the revision file
PrintWriter w = new PrintWriter(new FileOutputStream(getRevisionFile(build)));
try {
Map<String,SvnInfo> revMap = buildRevisionMap(workspace,listener);
for (Entry<String,SvnInfo> e : revMap.entrySet()) {
w.println( e.getKey() +'/'+ e.getValue().revision );
}
} finally {
w.close();
}
return calcChangeLog(build, changelogFile, launcher, listener);
}
/**
* Output from "svn info" command.
*/
public static class SvnInfo {
/** The remote URL of this directory */
String url;
/** Current workspace revision. */
int revision = -1;
private SvnInfo() {}
/**
* Returns true if this object is fully populated.
*/
public boolean isComplete() {
return url!=null && revision!=-1;
}
public void setUrl(String url) {
this.url = url;
}
public void setRevision(int revision) {
this.revision = revision;
}
/**
* Executes "svn info" command and returns the parsed output
*
* @param subject
* The target to run "svn info". Either local path or remote URL.
*/
public static SvnInfo parse(String subject, Map env, FilePath workspace, TaskListener listener) throws IOException {
String cmd = DESCRIPTOR.getSvnExe()+" info --xml "+subject;
listener.getLogger().println("$ "+cmd);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
int r = new Proc(cmd,env,baos,workspace.getLocal()).join();
if(r!=0) {
// failed. to allow user to diagnose the problem, send output to log
listener.getLogger().write(baos.toByteArray());
throw new IOException("svn info failed");
}
SvnInfo info = new SvnInfo();
Digester digester = new Digester();
digester.push(info);
digester.addBeanPropertySetter("info/entry/url");
digester.addSetProperties("info/entry/commit","revision","revision"); // set attributes. in particular @revision
try {
digester.parse(new ByteArrayInputStream(baos.toByteArray()));
} catch (SAXException e) {
// failed. to allow user to diagnose the problem, send output to log
listener.getLogger().write(baos.toByteArray());
e.printStackTrace(listener.fatalError("Failed to parse Subversion output"));
throw new IOException("Unabled to parse svn info output");
}
if(!info.isComplete())
throw new IOException("No revision in the svn info output");
return info;
}
}
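// Illustration (URL and revisions invented): the relevant part of "svn info --xml" output
// that SvnInfo.parse above feeds to Digester looks roughly like
//     <info>
//       <entry kind="dir" path="." revision="1200">
//         <url>https://svn.example.org/repo/trunk/foo</url>
//         <commit revision="1199">...</commit>
//       </entry>
//     </info>
// The "info/entry/url" rule binds the element body to setUrl(), and the
// "info/entry/commit" rule copies its revision attribute into setRevision().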
/**
* Checks the .svn metadata in the workspace and determines the revision of each module
* that the workspace contains.
*
* @return
* a map from module directory names to their "svn info" results. Parse failures are
* reported as an {@link IOException} rather than a null return.
*/
private Map<String,SvnInfo> buildRevisionMap(FilePath workspace, TaskListener listener) throws IOException {
PrintStream logger = listener.getLogger();
Map<String/*module name*/,SvnInfo> revisions = new HashMap<String,SvnInfo>();
Map env = createEnvVarMap(false);
// invoke the "svn info"
for( String module : getModuleDirNames() ) {
// parse the output
SvnInfo info = SvnInfo.parse(module,env,workspace,listener);
revisions.put(module,info);
logger.println("Revision:"+info.revision);
}
return revisions;
}
/**
* Gets the file that stores the revision.
*/
private static File getRevisionFile(Build build) {
return new File(build.getRootDir(),"revision.txt");
}
public boolean update(Launcher launcher, FilePath remoteDir, BuildListener listener) throws IOException {
ArgumentListBuilder cmd = new ArgumentListBuilder();
cmd.add(DESCRIPTOR.getSvnExe(), "update", "-q", "--non-interactive");
if(username!=null)
cmd.add(" --username ",username);
if(otherOptions!=null)
cmd.add(Util.tokenize(otherOptions));
StringTokenizer tokens = new StringTokenizer(modules);
while(tokens.hasMoreTokens()) {
if(!run(launcher,cmd,listener,new FilePath(remoteDir,getLastPathComponent(tokens.nextToken()))))
return false;
}
return true;
}
/**
* Returns true if we can use "svn update" instead of "svn checkout"
*/
private boolean isUpdatable(FilePath workspace,BuildListener listener) {
StringTokenizer tokens = new StringTokenizer(modules);
while(tokens.hasMoreTokens()) {
String url = tokens.nextToken();
String moduleName = getLastPathComponent(url);
File module = workspace.child(moduleName).getLocal();
try {
SvnInfo svnInfo = SvnInfo.parse(moduleName, createEnvVarMap(false), workspace, listener);
if(!svnInfo.url.equals(url)) {
listener.getLogger().println("Checking out a fresh workspace because the workspace is not "+url);
return false;
}
} catch (IOException e) {
listener.getLogger().println("Checking out a fresh workspace because Hudson failed to detect the current workspace "+module);
e.printStackTrace(listener.error(e.getMessage()));
return false;
}
}
return true;
}
public boolean pollChanges(Project project, Launcher launcher, FilePath workspace, TaskListener listener) throws IOException {
// current workspace revision
Map<String,SvnInfo> wsRev = buildRevisionMap(workspace,listener);
Map env = createEnvVarMap(false);
// check the corresponding remote revision
for (SvnInfo localInfo : wsRev.values()) {
SvnInfo remoteInfo = SvnInfo.parse(localInfo.url,env,workspace,listener);
if(remoteInfo.revision > localInfo.revision)
return true; // change found
}
return false; // no change
}
public ChangeLogParser createChangeLogParser() {
return new SubversionChangeLogParser();
}
public DescriptorImpl getDescriptor() {
return DESCRIPTOR;
}
public void buildEnvVars(Map env) {
// no environment variable
}
public FilePath getModuleRoot(FilePath workspace) {
String s;
// if multiple URLs are specified, pick the first one
int idx = modules.indexOf(' ');
if(idx>=0) s = modules.substring(0,idx);
else s = modules;
return workspace.child(getLastPathComponent(s));
}
private String getLastPathComponent(String s) {
String[] tokens = s.split("/");
return tokens[tokens.length-1]; // return the last token
}
static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<SCM> {
DescriptorImpl() {
super(SubversionSCM.class);
}
public String getDisplayName() {
return "Subversion";
}
public SCM newInstance(StaplerRequest req) {
return new SubversionSCM(
req.getParameter("svn_modules"),
req.getParameter("svn_use_update")!=null,
req.getParameter("svn_username"),
req.getParameter("svn_other_options")
);
}
public String getSvnExe() {
String value = (String)getProperties().get("svn_exe");
if(value==null)
value = "svn";
return value;
}
public void setSvnExe(String value) {
getProperties().put("svn_exe",value);
save();
}
public boolean configure( HttpServletRequest req ) {
setSvnExe(req.getParameter("svn_exe"));
return true;
}
/**
* Returns the Subversion version information.
*
* @return
* null if failed to obtain.
*/
public Version version(Launcher l, String svnExe) {
try {
if(svnExe==null || svnExe.equals("")) svnExe="svn";
ByteArrayOutputStream out = new ByteArrayOutputStream();
l.launch(new String[]{svnExe,"--version"},new String[0],out,FilePath.RANDOM).join();
// scan the output for the version line
BufferedReader r = new BufferedReader(new InputStreamReader(new ByteArrayInputStream(out.toByteArray())));
String line;
while((line = r.readLine())!=null) {
Matcher m = SVN_VERSION.matcher(line);
if(m.matches())
return new Version(Integer.parseInt(m.group(2)), m.group(1));
}
// ancient versions of Subversion didn't have the fixed version number line,
// or maybe something else is going wrong.
LOGGER.log(Level.WARNING, "Failed to find a version line in the 'svn --version' output");
return new Version(0,"(unknown)");
} catch (IOException e) {
// Stack trace likely to be overkill for a problem that isn't necessarily a problem at all:
LOGGER.log(Level.WARNING, "Failed to check svn version: {0}", e.toString());
return null; // failed to obtain
}
}
// web methods
public void doVersionCheck(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
// this method runs a new process, so it needs to be protected
new FormFieldValidator(req,rsp,true) {
protected void check() throws IOException, ServletException {
String svnExe = request.getParameter("exe");
Version v = version(new Launcher(TaskListener.NULL),svnExe);
if(v==null) {
error("Failed to check subversion version info. Is this a valid path?");
return;
}
if(v.isOK()) {
ok();
} else {
error("Version "+v.versionId+" found, but 1.3.0 is required");
}
}
}.process();
}
}
public static final class Version {
private final int revision;
private String versionId;
public Version(int revision, String versionId) {
this.revision = revision;
this.versionId = versionId;
}
/**
* Repository revision ID of this build.
*/
public int getRevision() {
return revision;
}
/**
* Human-readable version string.
*/
public String getVersionId() {
return versionId;
}
/**
* We use "svn info --xml", which is new in 1.3.0
*/
public boolean isOK() {
return revision>=17949;
}
}
private static final Pattern SVN_VERSION = Pattern.compile("svn, .+ ([0-9.]+) \\(r([0-9]+)\\)");
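// Illustration (numbers invented): a first line of "svn --version" output such as
//     svn, version 1.4.0 (r20000)
// matches SVN_VERSION with group(1)="1.4.0" and group(2)="20000", so version() above
// yields new Version(20000,"1.4.0"), and isOK() holds because 20000 >= 17949,
// the cut-off this class associates with the 1.3.0 release that introduced "svn info --xml".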
private static final Logger LOGGER = Logger.getLogger(SubversionSCM.class.getName());
}

View File

@ -0,0 +1,3 @@
<html><body>
Hudson's interface with source code management systems. Start with <a href="SCM.html"><tt>SCM</tt></a>
</body></html>

View File

@ -0,0 +1,226 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.Util;
import hudson.util.FormFieldValidator;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
import java.util.Map;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
/**
* @author Kohsuke Kawaguchi
*/
public class Ant extends Builder {
private final String targets;
/**
* Identifies {@link AntInstallation} to be used.
*/
private final String antName;
public Ant(String targets,String antName) {
this.targets = targets;
this.antName = antName;
}
public String getTargets() {
return targets;
}
/**
* Gets the {@link AntInstallation} to invoke,
* or null to invoke the default one.
*/
public AntInstallation getAnt() {
for( AntInstallation i : DESCRIPTOR.getInstallations() ) {
if(antName!=null && i.getName().equals(antName))
return i;
}
return null;
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
Project proj = build.getProject();
String cmd;
String execName;
if(onWindows)
execName = "ant.bat";
else
execName = "ant";
AntInstallation ai = getAnt();
if(ai==null)
cmd = execName+' '+targets;
else {
File exec = ai.getExecutable();
if(!ai.getExists()) {
listener.fatalError(exec+" doesn't exist");
return false;
}
cmd = exec.getPath()+' '+targets;
}
Map<String,String> env = build.getEnvVars();
if(ai!=null)
env.put("ANT_HOME",ai.getAntHome());
if(onWindows) {
// on Windows, executing a batch file can't return the correct error code,
// so we need to wrap it in cmd.exe.
// double %% is needed because we want ERRORLEVEL to be expanded after
// the batch file is executed, not before. This alone shows how broken Windows is...
cmd = "cmd.exe /C "+cmd+" && exit %%ERRORLEVEL%%";
}
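// Illustration (targets invented): for targets "clean dist" and the default ant.bat,
// the wrapped command becomes
//     cmd.exe /C ant.bat clean dist && exit %%ERRORLEVEL%%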
try {
int r = launcher.launch(cmd,env,listener.getLogger(),proj.getModuleRoot()).join();
return r==0;
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("command execution failed") );
return false;
}
}
public Descriptor<Builder> getDescriptor() {
return DESCRIPTOR;
}
public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<Builder> {
private DescriptorImpl() {
super(Ant.class);
}
public String getHelpFile() {
return "/help/project-config/ant.html";
}
public String getDisplayName() {
return "Invoke top-level Ant targets";
}
public AntInstallation[] getInstallations() {
AntInstallation[] r = (AntInstallation[]) getProperties().get("installations");
if(r==null)
return new AntInstallation[0];
return r.clone();
}
public boolean configure(HttpServletRequest req) {
boolean r = true;
int i;
String[] names = req.getParameterValues("ant_name");
String[] homes = req.getParameterValues("ant_home");
int len;
if(names!=null && homes!=null)
len = Math.min(names.length,homes.length);
else
len = 0;
// skip blank rows without leaving null holes in the stored array
AntInstallation[] insts = new AntInstallation[len];
int count = 0;
for( i=0; i<len; i++ ) {
if(names[i].length()==0 || homes[i].length()==0) continue;
insts[count++] = new AntInstallation(names[i],homes[i]);
}
AntInstallation[] trimmed = new AntInstallation[count];
System.arraycopy(insts,0,trimmed,0,count);
getProperties().put("installations",trimmed);
save();
return r;
}
public Builder newInstance(StaplerRequest req) {
return new Ant(req.getParameter("ant_targets"),req.getParameter("ant_version"));
}
//
// web methods
//
/**
* Checks if the ANT_HOME is valid.
*/
public void doCheckAntHome( StaplerRequest req, StaplerResponse rsp ) throws IOException, ServletException {
// this can be used to check the existence of a file on the server, so needs to be protected
new FormFieldValidator(req,rsp,true) {
public void check() throws IOException, ServletException {
File f = getFileParameter("value");
if(!f.isDirectory()) {
error(f+" is not a directory");
return;
}
File antJar = new File(f,"lib/ant.jar");
if(!antJar.exists()) {
error(f+" doesn't look like an Ant directory");
return;
}
ok();
}
}.process();
}
}
public static final class AntInstallation {
private final String name;
private final String antHome;
public AntInstallation(String name, String antHome) {
this.name = name;
this.antHome = antHome;
}
/**
* install directory.
*/
public String getAntHome() {
return antHome;
}
/**
* Human readable display name.
*/
public String getName() {
return name;
}
public File getExecutable() {
String execName;
if(File.separatorChar=='\\')
execName = "ant.bat";
else
execName = "ant";
return new File(getAntHome(),"bin/"+execName);
}
/**
* Returns true if the executable exists.
*/
public boolean getExists() {
return getExecutable().exists();
}
}
private static final boolean onWindows = File.separatorChar == '\\';
}

View File

@ -0,0 +1,23 @@
package hudson.tasks;
import hudson.model.BuildListener;
import org.apache.tools.ant.BuildException;
import org.apache.tools.ant.Task;
/**
* {@link BuildStep} that uses Ant.
*
* Contains helper code.
*
* @author Kohsuke Kawaguchi
*/
public abstract class AntBasedPublisher extends Publisher {
protected final void execTask(Task task, BuildListener listener) {
try {
task.execute();
} catch( BuildException e ) {
// failing to archive isn't a fatal error
e.printStackTrace(listener.error(e.getMessage()));
}
}
}

View File

@ -0,0 +1,123 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.Util;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import org.apache.tools.ant.taskdefs.Copy;
import org.apache.tools.ant.taskdefs.Delete;
import org.apache.tools.ant.types.FileSet;
import org.kohsuke.stapler.StaplerRequest;
import java.io.File;
import java.io.IOException;
/**
* Copies the artifacts into an archive directory.
*
* @author Kohsuke Kawaguchi
*/
public class ArtifactArchiver extends AntBasedPublisher {
/**
* Comma-separated list of files/directories to be archived.
*/
private final String artifacts;
/**
* Just keep the last successful artifact set, no more.
*/
private final boolean latestOnly;
public ArtifactArchiver(String artifacts, boolean latestOnly) {
this.artifacts = artifacts;
this.latestOnly = latestOnly;
}
public String getArtifacts() {
return artifacts;
}
public boolean isLatestOnly() {
return latestOnly;
}
public boolean prebuild(Build build, BuildListener listener) {
listener.getLogger().println("Removing artifacts from the previous build");
File dir = build.getArtifactsDir();
if(!dir.exists()) return true;
Delete delTask = new Delete();
delTask.setProject(new org.apache.tools.ant.Project());
delTask.setDir(dir);
delTask.setIncludes(artifacts);
execTask(delTask,listener);
return true;
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
Project p = build.getProject();
Copy copyTask = new Copy();
copyTask.setProject(new org.apache.tools.ant.Project());
File dir = build.getArtifactsDir();
dir.mkdirs();
copyTask.setTodir(dir);
FileSet src = new FileSet();
src.setDir(p.getWorkspace().getLocal());
src.setIncludes(artifacts);
copyTask.addFileset(src);
execTask(copyTask, listener);
if(latestOnly) {
Build b = p.getLastSuccessfulBuild();
if(b!=null) {
while(true) {
b = b.getPreviousBuild();
if(b==null) break;
// remove old artifacts
File ad = b.getArtifactsDir();
if(ad.exists()) {
listener.getLogger().println("Deleting old artifacts from "+b.getDisplayName());
try {
Util.deleteRecursive(ad);
} catch (IOException e) {
e.printStackTrace(listener.error(e.getMessage()));
}
}
}
}
}
return true;
}
public Descriptor<Publisher> getDescriptor() {
return DESCRIPTOR;
}
public static final Descriptor<Publisher> DESCRIPTOR = new Descriptor<Publisher>(ArtifactArchiver.class) {
public String getDisplayName() {
return "Archive the artifacts";
}
public String getHelpFile() {
return "/help/project-config/archive-artifact.html";
}
public Publisher newInstance(StaplerRequest req) {
return new ArtifactArchiver(
req.getParameter("artifacts").trim(),
req.getParameter("artifacts_latest_only")!=null);
}
};
}

View File

@ -0,0 +1,89 @@
package hudson.tasks;
import hudson.FilePath;
import hudson.Launcher;
import hudson.Util;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import org.kohsuke.stapler.StaplerRequest;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
/**
* Executes commands using a Windows batch file.
*
* @author Kohsuke Kawaguchi
*/
public class BatchFile extends Builder {
private final String command;
public BatchFile(String command) {
this.command = command;
}
public String getCommand() {
return command;
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
Project proj = build.getProject();
FilePath ws = proj.getWorkspace();
FilePath script=null;
try {
try {
script = ws.createTempFile("hudson",".bat");
Writer w = new FileWriter(script.getLocal());
w.write(command);
w.write("\r\nexit %ERRORLEVEL%");
w.close();
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("Unable to produce a batch file") );
return false;
}
String[] cmd = new String[] {script.getRemote()};
int r;
try {
r = launcher.launch(cmd,build.getEnvVars(),listener.getLogger(),ws).join();
} catch (IOException e) {
Util.displayIOException(e,listener);
e.printStackTrace( listener.fatalError("command execution failed") );
r = -1;
}
return r==0;
} finally {
if(script!=null)
script.delete();
}
}
public Descriptor<Builder> getDescriptor() {
return DESCRIPTOR;
}
public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<Builder> {
private DescriptorImpl() {
super(BatchFile.class);
}
public String getHelpFile() {
return "/help/project-config/batch.html";
}
public String getDisplayName() {
return "Execute Windows batch command";
}
public Builder newInstance(StaplerRequest req) {
return new BatchFile(req.getParameter("batchFile"));
}
}
}

View File

@ -0,0 +1,75 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Project;
import hudson.tasks.junit.JUnitResultArchiver;
import java.util.List;
/**
* One step of the whole build process.
*
* @author Kohsuke Kawaguchi
*/
public interface BuildStep {
/**
* Runs before the build begins.
*
* @return
* true if the build can continue, false if there was an error
* and the build needs to be aborted.
*/
boolean prebuild( Build build, BuildListener listener );
/**
* Runs the step over the given build and reports the progress to the listener.
*
* @return
* true if the build can continue, false if there was an error
* and the build needs to be aborted.
*/
boolean perform(Build build, Launcher launcher, BuildListener listener);
/**
* Returns an action object if this {@link BuildStep} has an action
* to contribute to a {@link Project}.
*
* @param project
* {@link Project} that owns this build step,
* since a {@link BuildStep} object doesn't usually have this "parent" pointer.
*/
Action getProjectAction(Project project);
/**
* List of all installed builders.
*
* Builders are invoked to perform the build itself.
*/
public static final List<Descriptor<Builder>> BUILDERS = Descriptor.toList(
Shell.DESCRIPTOR,
BatchFile.DESCRIPTOR,
Ant.DESCRIPTOR,
Maven.DESCRIPTOR
);
/**
* List of all installed publishers.
*
* Publishers are invoked after the build is completed, normally to perform
* some post-actions on build results, such as sending notifications, collecting
* results, etc.
*/
public static final List<Descriptor<Publisher>> PUBLISHERS = Descriptor.toList(
ArtifactArchiver.DESCRIPTOR,
Fingerprinter.DESCRIPTOR,
JavadocArchiver.DESCRIPTOR,
JUnitResultArchiver.DESCRIPTOR,
Mailer.DESCRIPTOR,
BuildTrigger.DESCRIPTOR
);
}

View File

@ -0,0 +1,103 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Job;
import hudson.model.Project;
import hudson.model.Result;
import java.util.List;
import org.kohsuke.stapler.StaplerRequest;
/**
* Triggers builds of other projects.
*
* @author Kohsuke Kawaguchi
*/
public class BuildTrigger extends Publisher {
/**
* Comma-separated list of other projects to be scheduled.
*/
private String childProjects;
public BuildTrigger(String childProjects) {
this.childProjects = childProjects;
}
public BuildTrigger(List<Project> childProjects) {
this(Project.toNameList(childProjects));
}
public String getChildProjectsValue() {
return childProjects;
}
public List<Project> getChildProjects() {
return Project.fromNameList(childProjects);
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
if(build.getResult()== Result.SUCCESS) {
for (Project p : getChildProjects()) {
listener.getLogger().println("Triggering a new build of "+p.getName());
p.scheduleBuild();
}
}
return true;
}
/**
* Called from {@link Job#renameTo(String)} when a job is renamed.
*
* @return true
* if this {@link BuildTrigger} is changed and needs to be saved.
*/
public boolean onJobRenamed(String oldName, String newName) {
// quick test
if(!childProjects.contains(oldName))
return false;
boolean changed = false;
// we need to do this per string, since the old Project object is already gone.
String[] projects = childProjects.split(",");
for( int i=0; i<projects.length; i++ ) {
if(projects[i].trim().equals(oldName)) {
projects[i] = newName;
changed = true;
}
}
if(changed) {
StringBuilder b = new StringBuilder();
for (String p : projects) {
if(b.length()>0) b.append(',');
b.append(p);
}
childProjects = b.toString();
}
return changed;
}
public Descriptor<Publisher> getDescriptor() {
return DESCRIPTOR;
}
public static final Descriptor<Publisher> DESCRIPTOR = new Descriptor<Publisher>(BuildTrigger.class) {
public String getDisplayName() {
return "Build other projects";
}
public Publisher newInstance(StaplerRequest req) {
return new BuildTrigger(req.getParameter("childProjects"));
}
};
}

View File

@ -0,0 +1,33 @@
package hudson.tasks;
import hudson.model.Describable;
import hudson.model.Action;
import hudson.model.Project;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.ExtensionPoint;
/**
* {@link BuildStep}s that perform the actual build.
*
* <p>
* To register a custom {@link Builder} from a plugin,
* add it to {@link BuildStep#BUILDERS}.
*
* @author Kohsuke Kawaguchi
*/
public abstract class Builder implements BuildStep, Describable<Builder>, ExtensionPoint {
/**
* Default implementation that does nothing.
*/
public boolean prebuild(Build build, BuildListener listener) {
return true;
}
/**
* Default implementation that does nothing.
*/
public Action getProjectAction(Project project) {
return null;
}
}
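To make the extension point concrete, here is a minimal hypothetical Builder (illustrative sketch, not a file in this commit; the class name, display name, and form parameter are invented) following the same pattern as BatchFile and Ant above. It becomes selectable once its DESCRIPTOR is added to BuildStep.BUILDERS, as the Javadoc above describes.
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import org.kohsuke.stapler.StaplerRequest;
/**
 * Sketch of a custom build step: writes a fixed message to the build log.
 */
public class EchoBuilder extends Builder {
private final String message;
public EchoBuilder(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
// a real builder would launch an external process here
listener.getLogger().println(message);
return true; // let the build continue
}
public Descriptor<Builder> getDescriptor() {
return DESCRIPTOR;
}
public static final DescriptorImpl DESCRIPTOR = new DescriptorImpl();
public static final class DescriptorImpl extends Descriptor<Builder> {
private DescriptorImpl() {
super(EchoBuilder.class);
}
public String getDisplayName() {
return "Echo a message";
}
public Builder newInstance(StaplerRequest req) {
return new EchoBuilder(req.getParameter("echo_message"));
}
}
}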

View File

@ -0,0 +1,241 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.Fingerprint;
import hudson.model.Hudson;
import hudson.model.Project;
import hudson.model.Result;
import hudson.model.Fingerprint.BuildPtr;
import org.apache.tools.ant.DirectoryScanner;
import org.apache.tools.ant.types.FileSet;
import org.kohsuke.stapler.StaplerRequest;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import java.util.Set;
import java.util.Map.Entry;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Records fingerprints of the specified files.
*
* @author Kohsuke Kawaguchi
*/
public class Fingerprinter extends Publisher {
/**
* Comma-separated list of files/directories to be fingerprinted.
*/
private final String targets;
/**
* Also record all the fingerprints of the build artifacts.
*/
private final boolean recordBuildArtifacts;
public Fingerprinter(String targets, boolean recordBuildArtifacts) {
this.targets = targets;
this.recordBuildArtifacts = recordBuildArtifacts;
}
public String getTargets() {
return targets;
}
public boolean getRecordBuildArtifacts() {
return recordBuildArtifacts;
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
listener.getLogger().println("Recording fingerprints");
Map<String,String> record = new HashMap<String,String>();
MessageDigest md5;
try {
md5 = MessageDigest.getInstance("MD5");
} catch (NoSuchAlgorithmException e) {
// I don't think this is possible, but check anyway
e.printStackTrace(listener.error("MD5 not installed"));
build.setResult(Result.FAILURE);
return true;
}
if(targets.length()!=0)
record(build, md5, listener, record, targets);
if(recordBuildArtifacts) {
ArtifactArchiver aa = (ArtifactArchiver) build.getProject().getPublishers().get(ArtifactArchiver.DESCRIPTOR);
if(aa==null) {
// configuration error
listener.error("Build artifacts are supposed to be fingerprinted, but build artifact archiving is not configured");
build.setResult(Result.FAILURE);
return true;
}
record(build, md5, listener, record, aa.getArtifacts() );
}
build.getActions().add(new FingerprintAction(build,record));
return true;
}
private void record(Build build, MessageDigest md5, BuildListener listener, Map<String,String> record, String targets) {
Project p = build.getProject();
FileSet src = new FileSet();
File baseDir = p.getWorkspace().getLocal();
src.setDir(baseDir);
src.setIncludes(targets);
byte[] buf = new byte[8192];
DirectoryScanner ds = src.getDirectoryScanner(new org.apache.tools.ant.Project());
for( String f : ds.getIncludedFiles() ) {
File file = new File(baseDir,f);
// consider the file to be produced by this build only if the timestamp
// is newer than when the build has started.
boolean produced = build.getTimestamp().getTimeInMillis() <= file.lastModified();
try {
md5.reset(); // technically not necessary, but hey, just to be safe
DigestInputStream in = new DigestInputStream(new FileInputStream(file),md5);
try {
while(in.read(buf)>0)
; // simply discard the input
} finally {
in.close();
}
Fingerprint fp = Hudson.getInstance().getFingerprintMap().getOrCreate(
produced?build:null, file.getName(), md5.digest());
if(fp==null) {
listener.error("failed to record fingerprint for "+file);
continue;
}
fp.add(build);
record.put(f,fp.getHashString());
} catch (IOException e) {
e.printStackTrace(listener.error("Failed to compute digest for "+file));
}
}
}
public Descriptor<Publisher> getDescriptor() {
return DESCRIPTOR;
}
public static final Descriptor<Publisher> DESCRIPTOR = new Descriptor<Publisher>(Fingerprinter.class) {
public String getDisplayName() {
return "Record fingerprints of files to track usage";
}
public String getHelpFile() {
return "/help/project-config/fingerprint.html";
}
public Publisher newInstance(StaplerRequest req) {
return new Fingerprinter(
req.getParameter("fingerprint_targets").trim(),
req.getParameter("fingerprint_artifacts")!=null);
}
};
/**
* Action for displaying fingerprints.
*/
public static final class FingerprintAction implements Action {
private final Build build;
private final Map<String,String> record;
private transient WeakReference<Map<String,Fingerprint>> ref;
public FingerprintAction(Build build, Map<String, String> record) {
this.build = build;
this.record = record;
}
public String getIconFileName() {
return "fingerprint.gif";
}
public String getDisplayName() {
return "See fingerprints";
}
public String getUrlName() {
return "fingerprints";
}
public Build getBuild() {
return build;
}
/**
* Map from the names of the fingerprinted files to their fingerprint records.
*/
public synchronized Map<String,Fingerprint> getFingerprints() {
if(ref!=null) {
Map<String,Fingerprint> m = ref.get();
if(m!=null)
return m;
}
Hudson h = Hudson.getInstance();
Map<String,Fingerprint> m = new TreeMap<String,Fingerprint>();
for (Entry<String, String> r : record.entrySet()) {
try {
m.put(r.getKey(), h._getFingerprint(r.getValue()) );
} catch (IOException e) {
logger.log(Level.WARNING,e.getMessage(),e);
}
}
m = Collections.unmodifiableMap(m);
ref = new WeakReference<Map<String,Fingerprint>>(m);
return m;
}
/**
* Gets the dependencies on other builds as a map.
* Returns build numbers instead of {@link Build}, since log records may be gone.
*/
public Map<Project,Integer> getDependencies() {
Map<Project,Integer> r = new HashMap<Project,Integer>();
for (Fingerprint fp : getFingerprints().values()) {
BuildPtr bp = fp.getOriginal();
if(bp==null) continue; // outside Hudson
if(bp.is(build)) continue; // we are the owner
Integer existing = r.get(bp.getJob());
if(existing!=null && existing>bp.getNumber())
continue; // the record in the map is already up to date
r.put((Project)bp.getJob(),bp.getNumber());
}
return r;
}
}
private static final Logger logger = Logger.getLogger(Fingerprinter.class.getName());
}

View File

@ -0,0 +1,117 @@
package hudson.tasks;
import hudson.Launcher;
import hudson.model.Action;
import hudson.model.Build;
import hudson.model.BuildListener;
import hudson.model.Descriptor;
import hudson.model.DirectoryHolder;
import hudson.model.Project;
import hudson.model.ProminentProjectAction;
import org.apache.tools.ant.taskdefs.Copy;
import org.apache.tools.ant.types.FileSet;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
import javax.servlet.ServletException;
import java.io.File;
import java.io.IOException;
/**
* Saves the javadoc for the project and publishes it.
*
* @author Kohsuke Kawaguchi
*/
public class JavadocArchiver extends AntBasedPublisher {
/**
* Path to the javadoc directory in the workspace.
*/
private final String javadocDir;
public JavadocArchiver(String javadocDir) {
this.javadocDir = javadocDir;
}
public String getJavadocDir() {
return javadocDir;
}
/**
* Gets the directory where the javadoc is stored for the given project.
*/
private static File getJavadocDir(Project project) {
return new File(project.getRootDir(),"javadoc");
}
public boolean perform(Build build, Launcher launcher, BuildListener listener) {
// TODO: run tar or something for better remote copy
File javadoc = new File(build.getParent().getWorkspace().getLocal(), javadocDir);
if(!javadoc.exists()) {
listener.error("The specified javadoc directory doesn't exist: "+javadoc);
return false;
}
if(!javadoc.isDirectory()) {
listener.error("The specified javadoc directory isn't a directory: "+javadoc);
return false;
}
listener.getLogger().println("Publishing javadoc");
File target = getJavadocDir(build.getParent());
target.mkdirs();
Copy copyTask = new Copy();
copyTask.setProject(new org.apache.tools.ant.Project());
copyTask.setTodir(target);
FileSet src = new FileSet();
src.setDir(javadoc);
copyTask.addFileset(src);
execTask(copyTask, listener);
return true;
}
public Action getProjectAction(Project project) {
return new JavadocAction(project);
}
public Descriptor<Publisher> getDescriptor() {
return DESCRIPTOR;
}
public static final Descriptor<Publisher> DESCRIPTOR = new Descriptor<Publisher>(JavadocArchiver.class) {
public String getDisplayName() {
return "Publish javadoc";
}
public Publisher newInstance(StaplerRequest req) {
return new JavadocArchiver(req.getParameter("javadoc_dir"));
}
};
public static final class JavadocAction extends DirectoryHolder implements ProminentProjectAction {
private final Project project;
public JavadocAction(Project project) {
this.project = project;
}
public String getUrlName() {
return "javadoc";
}
public String getDisplayName() {
return "Javadoc";
}
public String getIconFileName() {
return "help.gif";
}
public void doDynamic(StaplerRequest req, StaplerResponse rsp) throws IOException, ServletException {
serveFile(req, rsp, getJavadocDir(project), "help.gif", false);
}
}
}

View File

@ -0,0 +1,113 @@
package hudson.tasks;
import hudson.model.Describable;
import hudson.model.Descriptor;
import hudson.model.Job;
import hudson.model.Run;
import hudson.scm.SCM;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.Calendar;
import java.util.GregorianCalendar;
import org.kohsuke.stapler.StaplerRequest;
/**
* Deletes old log files.
*
* TODO: is there any other task that follows the same pattern?
* try to generalize this just like {@link SCM} or {@link BuildStep}.
*
* @author Kohsuke Kawaguchi
*/
public class LogRotator implements Describable<LogRotator> {
/**
* If not -1, history is only kept up to this number of days.
*/
private final int daysToKeep;
/**
* If not -1, only this number of builds is kept.
*/
private final int numToKeep;
public LogRotator(int daysToKeep, int numToKeep) {
this.daysToKeep = daysToKeep;
this.numToKeep = numToKeep;
}
public void perform(Job<?,?> job) throws IOException {
// keep the last successful build regardless of the status
Run lsb = job.getLastSuccessfulBuild();
if(numToKeep!=-1) {
Run[] builds = job.getBuilds().toArray(new Run[0]);
for( int i=numToKeep; i<builds.length; i++ ) {
if(!builds[i].isKeepLog() && builds[i]!=lsb)
builds[i].delete();
}
}
if(daysToKeep!=-1) {
Calendar cal = new GregorianCalendar();
cal.add(Calendar.DAY_OF_YEAR,-daysToKeep);
// copy it to an array because we'll be deleting builds as we go.
for( Run r : job.getBuilds().toArray(new Run[0]) ) {
if(r.getTimestamp().before(cal) && !r.isKeepLog() && r!=lsb)
r.delete();
}
}
}
public int getDaysToKeep() {
return daysToKeep;
}
public int getNumToKeep() {
return numToKeep;
}
public String getDaysToKeepStr() {
if(daysToKeep==-1) return "";
else return String.valueOf(daysToKeep);
}
public String getNumToKeepStr() {
if(numToKeep==-1) return "";
else return String.valueOf(numToKeep);
}
public LRDescriptor getDescriptor() {
return DESCRIPTOR;
}
public static final LRDescriptor DESCRIPTOR = new LRDescriptor();
public static final class LRDescriptor extends Descriptor<LogRotator> {
private LRDescriptor() {
super(LogRotator.class);
}
public String getDisplayName() {
return "Log Rotation";
}
public LogRotator newInstance(StaplerRequest req) {
return new LogRotator(
parse(req,"logrotate_days"),
parse(req,"logrotate_nums") );
}
private int parse(HttpServletRequest req, String name) {
String p = req.getParameter(name);
if(p==null) return -1;
try {
return Integer.parseInt(p);
} catch (NumberFormatException e) {
return -1;
}
}
}
}
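A minimal usage sketch (illustrative only, not a file in this commit; the values are invented): prune a job so that at most the ten newest builds are kept and nothing older than thirty days survives. The last successful build and builds marked to keep their log are always spared by perform().
package hudson.tasks;
import hudson.model.Job;
import java.io.IOException;
class LogRotatorSketch {
static void rotate(Job<?,?> job) throws IOException {
// -1 for either argument disables that criterion (the empty string in the web form)
new LogRotator(30,10).perform(job);
}
}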

Some files were not shown because too many files have changed in this diff.