Mirror of https://github.com/apple/swift.git (synced 2025-12-14 20:36:38 +01:00)
[Legacy Driver] Obsolete and remove batch compilation mode from the legacy driver
Batch compilation mode is a maintenance burden, and keeping the legacy driver in a simplified state reduces the possibility of things going wrong and hitting old bugs.
@@ -114,9 +114,6 @@ WARNING(warn_opt_remark_disabled, none,
"requires a single compiler invocation: consider enabling the "
"-whole-module-optimization flag", ())

WARNING(warn_ignoring_batch_mode,none,
"ignoring '-enable-batch-mode' because '%0' was also specified", (StringRef))

WARNING(warn_ignoring_wmo, none,
"ignoring '-wmo' because '-dump-ast' was also specified", ())

@@ -126,8 +126,6 @@ WARNING(warning_locale_path_not_found,none,
"translation is disabled", (StringRef))
WARNING(warning_cannot_find_locale_file,none,
"cannot find translations for '%0' at '%1': no such file", (StringRef, StringRef))
WARNING(warning_cannot_multithread_batch_mode,none,
"ignoring -num-threads argument; cannot multithread batch mode", ())
ERROR(error_cannot_explicit_interface_build_in_mode,none,
"'-explicit-interface-module-build' only supported when building a module from interface ('-compile-module-from-interface' or '-typecheck-module-from-interface')'", ())
ERROR(error_unsupported_option_argument,none,
@@ -108,11 +108,11 @@ private:
DiagnosticEngine &Diags;

/// The ToolChain this Compilation was built with, that it may reuse to build
/// subsequent BatchJobs.
/// subsequent Jobs.
const ToolChain &TheToolChain;

/// The OutputInfo, which the Compilation stores a copy of upon
/// construction, and which it may use to build subsequent batch
/// construction, and which it may use to build subsequent
/// jobs itself.
OutputInfo TheOutputInfo;

@@ -167,22 +167,6 @@ private:
/// even if they returned an error status.
bool ContinueBuildingAfterErrors = false;

/// Indicates whether groups of parallel frontend jobs should be merged
/// together and run in composite "batch jobs" when possible, to reduce
/// redundant work.
const bool EnableBatchMode;

/// Provides a randomization seed to batch-mode partitioning, for debugging.
const unsigned BatchSeed;

/// Overrides parallelism level and \c BatchSizeLimit, sets exact
/// count of batches, if in batch-mode.
const std::optional<unsigned> BatchCount;

/// Overrides maximum batch size, if in batch-mode and not overridden
/// by \c BatchCount.
const std::optional<unsigned> BatchSizeLimit;
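These four batching knobs map to the driver flags -enable-batch-mode, -driver-batch-seed, -driver-batch-count and -driver-batch-size-limit (parsed later in this diff), and together they decide how many batches the scheduler forms. A minimal sketch of that sizing rule, mirroring the pickNumberOfPartitions logic shown further down; the function name and scaffolding here are illustrative, not driver API:

#include <algorithm>
#include <cstddef>
#include <optional>

// Sketch: an explicit BatchCount wins; otherwise form enough batches to
// respect both the parallelism level and the (default 25-file) size cap.
size_t pickBatchCount(std::optional<unsigned> BatchCount,
                      std::optional<unsigned> BatchSizeLimit,
                      size_t NumParallelTasks, size_t NumFiles) {
  if (BatchCount)
    return *BatchCount;
  size_t SizeLimit = BatchSizeLimit.value_or(25);
  size_t NeededForCap = (NumFiles + SizeLimit - 1) / SizeLimit; // round up
  return std::max(NumParallelTasks, NeededForCap);
}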
/// True if temporary files should not be deleted.
const bool SaveTemps;

@@ -244,10 +228,6 @@ public:
std::unique_ptr<llvm::opt::DerivedArgList> TranslatedArgs,
InputFileList InputsWithTypes,
size_t FilelistThreshold,
bool EnableBatchMode = false,
unsigned BatchSeed = 0,
std::optional<unsigned> BatchCount = std::nullopt,
std::optional<unsigned> BatchSizeLimit = std::nullopt,
bool SaveTemps = false,
bool ShowDriverTimeCompilation = false,
std::unique_ptr<UnifiedStatsReporter> Stats = nullptr,
@@ -302,10 +282,6 @@ public:
return DerivedOutputFileMap;
}

bool getBatchModeEnabled() const {
return EnableBatchMode;
}

bool getContinueBuildingAfterErrors() const {
return ContinueBuildingAfterErrors;
}
@@ -352,14 +328,6 @@ public:
return Level;
}

unsigned getBatchSeed() const {
return BatchSeed;
}

std::optional<unsigned> getBatchCount() const { return BatchCount; }

std::optional<unsigned> getBatchSizeLimit() const { return BatchSizeLimit; }

/// Requests the path to a file containing all input source files. This can
/// be shared across jobs.
///
@@ -399,7 +367,7 @@ public:
/// Unfortunately the success or failure of a Swift compilation is currently
/// sensitive to the order in which files are processed, at least in terms of
/// the order of processing extensions (and likely other ways we haven't
/// discovered yet). So long as this is true, we need to make sure any batch
/// discovered yet). So long as this is true, we need to make sure any
/// job we build names its inputs in an order that's a subsequence of the
/// sequence of inputs the driver was initially invoked with.
///
@@ -67,19 +67,6 @@ public:
/// A compilation using a single frontend invocation without -primary-file.
SingleCompile,

/// A single process that batches together multiple StandardCompile Jobs.
///
/// Note: this is a transient value to use _only_ for the individual
/// BatchJobs that are the temporary containers for multiple StandardCompile
/// Jobs built by ToolChain::constructBatchJob.
///
/// In particular, the driver treats a batch-mode-enabled Compilation as
/// having OutputInfo::CompilerMode == StandardCompile, with the
/// Compilation::BatchModeEnabled flag set to true, _not_ as a
/// BatchModeCompile Compilation. The top-level OutputInfo::CompilerMode for
/// a Compilation should never be BatchModeCompile.
BatchModeCompile,

/// Invoke the REPL
REPL,

@@ -171,7 +158,7 @@ public:
/// allowable OutputInfo::Mode values.
enum class DriverKind {
Interactive, // swift
Batch, // swiftc
Standard, // swiftc
SILOpt, // sil-opt
SILFuncExtractor,// sil-func-extractor
SILNM, // sil-nm
@@ -313,14 +300,12 @@ public:
///
/// \param TC The current tool chain.
/// \param Args The input arguments.
/// \param BatchMode Whether the driver has been explicitly or implicitly
/// instructed to use batch mode.
/// \param Inputs The inputs to the driver.
/// \param[out] OI The OutputInfo in which to store the resulting output
/// information.
void buildOutputInfo(const ToolChain &TC,
const llvm::opt::DerivedArgList &Args,
const bool BatchMode, const InputFileList &Inputs,
const InputFileList &Inputs,
OutputInfo &OI) const;

/// Construct the list of Actions to perform for the given arguments,
@@ -472,11 +457,8 @@ private:
/// there is an actual conflict.
/// \param Args The input arguments.
/// \param Inputs The inputs to the driver.
/// \param BatchModeOut An out-parameter flag that indicates whether to
/// batch the jobs of the resulting \c Mode::StandardCompile compilation.
OutputInfo::Mode computeCompilerMode(const llvm::opt::DerivedArgList &Args,
const InputFileList &Inputs,
bool &BatchModeOut) const;
const InputFileList &Inputs) const;
};

} // end namespace driver
@@ -389,8 +389,7 @@ public:
StringRef Terminator = "\n") const;

/// Call the provided Callback with any Jobs (and their possibly-quasi-PIDs)
/// contained within this Job; if this job is not a BatchJob, just pass \c
/// this and the provided \p OSPid back to the Callback.
/// contained within this Job;
virtual void forEachContainedJobAndPID(
llvm::sys::procid_t OSPid,
llvm::function_ref<void(const Job *, Job::PID)> Callback) const {
@@ -411,51 +410,6 @@ public:
StringRef getFirstSwiftPrimaryInput() const;
};

/// A BatchJob comprises a _set_ of jobs, each of which is sufficiently similar
/// to the others that the whole set can be combined into a single subprocess
/// (and thus run potentially more-efficiently than running each Job in the set
/// individually).
///
/// Not all Jobs can be combined into a BatchJob: at present, only those Jobs
/// that come from CompileJobActions, and which otherwise have the exact same
/// input file list and arguments as one another, aside from their primary-file.
/// See ToolChain::jobsAreBatchCombinable for details.

class BatchJob : public Job {

/// The set of constituents making up the batch.
const SmallVector<const Job *, 4> CombinedJobs;

/// A negative number to use as the base value for assigning quasi-PID to Jobs
/// in the \c CombinedJobs array. Quasi-PIDs count _down_ from this value.
const Job::PID QuasiPIDBase;

public:
BatchJob(const JobAction &Source, SmallVectorImpl<const Job *> &&Inputs,
std::unique_ptr<CommandOutput> Output, const char *Executable,
llvm::opt::ArgStringList Arguments,
EnvironmentVector ExtraEnvironment, std::vector<FilelistInfo> Infos,
ArrayRef<const Job *> Combined, Job::PID &NextQuasiPID,
std::optional<ResponseFileInfo> ResponseFile = std::nullopt);

ArrayRef<const Job*> getCombinedJobs() const {
return CombinedJobs;
}

/// Call the provided callback for each Job in the batch, passing the
/// corresponding quasi-PID with each Job.
void forEachContainedJobAndPID(
llvm::sys::procid_t OSPid,
llvm::function_ref<void(const Job *, Job::PID)> Callback) const override {
Job::PID QPid = QuasiPIDBase;
assert(QPid < 0);
for (auto const *J : CombinedJobs) {
assert(QPid != std::numeric_limits<Job::PID>::min());
Callback(J, QPid--);
}
}
};

} // end namespace driver
} // end namespace swift
@@ -98,7 +98,7 @@ protected:
bool shouldUseSupplementaryOutputFileMapInFrontendInvocation() const;

/// Reify the existing behavior that SingleCompile compile actions do not
/// filter, but batch-mode and single-file compilations do. Some clients are
/// filter, single-file compilations do. Some clients are
/// relying on this (i.e., they pass inputs that don't have ".swift" as an
/// extension.) It would be nice to eliminate this distinction someday.
bool shouldFilterFrontendInputsByType() const;
@@ -281,33 +281,6 @@ public:
std::unique_ptr<CommandOutput> output,
const OutputInfo &OI) const;

/// Return true iff the input \c Job \p A is an acceptable candidate for
/// batching together into a BatchJob, via a call to \c
/// constructBatchJob. This is true when the \c Job is a built from a \c
/// CompileJobAction in a \c Compilation \p C running in \c
/// OutputInfo::Mode::StandardCompile output mode, with a single \c TY_Swift
/// \c InputAction.
bool jobIsBatchable(const Compilation &C, const Job *A) const;

/// Equivalence relation that holds iff the two input Jobs \p A and \p B are
/// acceptable candidates for combining together into a \c BatchJob, via a
/// call to \c constructBatchJob. This is true when each job independently
/// satisfies \c jobIsBatchable, and the two jobs have identical executables,
/// output types and environments (i.e. they are identical aside from their
/// inputs).
bool jobsAreBatchCombinable(const Compilation &C, const Job *A,
const Job *B) const;

/// Construct a \c BatchJob that subsumes the work of a set of Jobs. Any pair
/// of elements in \p Jobs are assumed to satisfy the equivalence relation \c
/// jobsAreBatchCombinable, i.e. they should all be "the same" job in in all
/// ways other than their choices of inputs. The provided \p NextQuasiPID
/// should be a negative number that persists between calls; this method will
/// decrement it to assign quasi-PIDs to each of the \p Jobs passed.
std::unique_ptr<Job> constructBatchJob(ArrayRef<const Job *> Jobs,
int64_t &NextQuasiPID,
Compilation &C) const;
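The composition of these three batching entry points appears later in this diff, in Compilation.cpp and ToolChain.cpp; the snippet below is only an illustrative summary of that flow (the names PendingJobs, Batch, Combined and NextQuasiPID are placeholders, not the driver's actual code): filter candidates with jobIsBatchable, group mutually combinable ones with jobsAreBatchCombinable, then hand each group to constructBatchJob together with the persistent negative quasi-PID counter.

// Illustrative usage of the declarations above (TC is a ToolChain, C a Compilation).
std::vector<const Job *> Batch;
for (const Job *J : PendingJobs) {
  if (!TC.jobIsBatchable(C, J))
    continue;                                   // leave non-compile jobs alone
  if (Batch.empty() || TC.jobsAreBatchCombinable(C, Batch.front(), J))
    Batch.push_back(J);                         // same executable, outputs, environment
}
int64_t NextQuasiPID = -1000;                   // persists across constructBatchJob calls
std::unique_ptr<Job> Combined = TC.constructBatchJob(Batch, NextQuasiPID, C);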
/// Return the default language type to use for the given extension.
/// If the extension is empty or is otherwise not recognized, return
/// the invalid type \c TY_INVALID.
@@ -33,7 +33,7 @@ def ModuleWrapOption : OptionFlag;
def NoDriverOption : OptionFlag;

// Some options should not be available depending on whether this is the
// interactive driver 'swift', or the batch compiler 'swiftc'.
// interactive driver 'swift', or the aot compiler 'swiftc'.
def NoInteractiveOption : OptionFlag;
def NoBatchOption : OptionFlag;
@@ -52,12 +52,6 @@
#include <io.h>
#endif

#define DEBUG_TYPE "batch-mode"

// Batch-mode has a sub-mode for testing that randomizes batch partitions,
// by user-provided seed. That is the only thing randomized here.
#include <random>

using namespace swift;
using namespace swift::sys;
using namespace swift::driver;
@@ -111,10 +105,6 @@ Compilation::Compilation(DiagnosticEngine &Diags,
|
||||
std::unique_ptr<DerivedArgList> TranslatedArgs,
|
||||
InputFileList InputsWithTypes,
|
||||
size_t FilelistThreshold,
|
||||
bool EnableBatchMode,
|
||||
unsigned BatchSeed,
|
||||
std::optional<unsigned> BatchCount,
|
||||
std::optional<unsigned> BatchSizeLimit,
|
||||
bool SaveTemps,
|
||||
bool ShowDriverTimeCompilation,
|
||||
std::unique_ptr<UnifiedStatsReporter> StatsReporter,
|
||||
@@ -125,10 +115,6 @@ Compilation::Compilation(DiagnosticEngine &Diags,
|
||||
RawInputArgs(std::move(InputArgs)),
|
||||
TranslatedArgs(std::move(TranslatedArgs)),
|
||||
InputFilesWithTypes(std::move(InputsWithTypes)),
|
||||
EnableBatchMode(EnableBatchMode),
|
||||
BatchSeed(BatchSeed),
|
||||
BatchCount(BatchCount),
|
||||
BatchSizeLimit(BatchSizeLimit),
|
||||
SaveTemps(SaveTemps),
|
||||
ShowDriverTimeCompilation(ShowDriverTimeCompilation),
|
||||
Stats(std::move(StatsReporter)),
|
||||
@@ -140,7 +126,6 @@ static bool writeFilelistIfNecessary(const Job *job, const ArgList &args,
|
||||
DiagnosticEngine &diags);
|
||||
|
||||
using CommandSetVector = llvm::SetVector<const Job*>;
|
||||
using BatchPartition = std::vector<std::vector<const Job*>>;
|
||||
|
||||
namespace {
|
||||
static DetailedTaskDescription
|
||||
@@ -205,26 +190,9 @@ namespace driver {
CommandSet ScheduledCommands;

/// A temporary buffer to hold commands that were scheduled but haven't been
/// added to the Task Queue yet, because we might try batching them together
/// first.
/// added to the Task Queue yet
CommandSetVector PendingExecution;

/// Set of synthetic BatchJobs that serve to cluster subsets of jobs waiting
/// in PendingExecution. Also used to identify (then unpack) BatchJobs back
/// to their underlying non-Batch Jobs, when running a callback from
/// TaskQueue.
CommandSet BatchJobs;

/// Persistent counter for allocating quasi-PIDs to Jobs combined into
/// BatchJobs. Quasi-PIDs are _negative_ PID-like unique keys used to
/// masquerade BatchJob constituents as (quasi)processes, when writing
/// parseable output to consumers that don't understand the idea of a batch
/// job. They are negative in order to avoid possibly colliding with real
/// PIDs (which are always positive). We start at -1000 here as a crude but
/// harmless hedge against colliding with an errno value that might slip
/// into the stream of real PIDs (say, due to a TaskQueue bug).
int64_t NextBatchQuasiPID = parseable_output::QUASI_PID_START;
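As a concrete illustration of the numbering scheme described above (the file names and batch size below are made up, and QUASI_PID_START is taken to be -1000 as the comment states), the first batch reports its constituents under quasi-PIDs counting down from the base, and the counter is advanced past the whole batch so the next batch starts lower still:

#include <cstdint>
#include <iostream>
#include <vector>

int main() {
  int64_t NextBatchQuasiPID = -1000;                       // QUASI_PID_START
  std::vector<const char *> Batch = {"a.swift", "b.swift", "c.swift"};
  int64_t QPid = NextBatchQuasiPID;                        // QuasiPIDBase for this batch
  NextBatchQuasiPID -= static_cast<int64_t>(Batch.size()); // now -1003 for the next batch
  for (const char *Input : Batch)
    std::cout << Input << " -> quasi-PID " << QPid-- << "\n"; // -1000, -1001, -1002
}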
/// All jobs which have finished execution or which have been determined
/// that they don't need to run.
CommandSet FinishedCommands;
@@ -289,7 +257,7 @@ namespace driver {
|
||||
ScheduledCommands.insert(Cmd);
|
||||
|
||||
// Adding to pending means it should be in the next round of additions to
|
||||
// the task queue (either batched or singularly); we remove Jobs from
|
||||
// the task queue; we remove Jobs from
|
||||
// PendingExecution once we hand them over to the TaskQueue.
|
||||
PendingExecution.insert(Cmd);
|
||||
}
|
||||
@@ -345,10 +313,6 @@ namespace driver {
|
||||
}
|
||||
}
|
||||
|
||||
bool isBatchJob(const Job *MaybeBatchJob) const {
|
||||
return BatchJobs.count(MaybeBatchJob) != 0;
|
||||
}
|
||||
|
||||
/// Callback which will be called immediately after a task has started. This
|
||||
/// callback may be used to provide output indicating that the task began.
|
||||
void taskBegan(ProcessId Pid, void *Context) {
|
||||
@@ -390,60 +354,6 @@ namespace driver {
|
||||
}
|
||||
}
|
||||
|
||||
/// Check to see if a job produced a zero-length serialized diagnostics
|
||||
/// file, which is used to indicate batch-constituents that were batched
|
||||
/// together with a failing constituent but did not, themselves, produce any
|
||||
/// errors.
|
||||
bool jobWasBatchedWithFailingJobs(const Job *J) const {
|
||||
auto DiaPath =
|
||||
J->getOutput().getAnyOutputForType(file_types::TY_SerializedDiagnostics);
|
||||
if (DiaPath.empty())
|
||||
return false;
|
||||
if (!llvm::sys::fs::is_regular_file(DiaPath))
|
||||
return false;
|
||||
uint64_t Size;
|
||||
auto EC = llvm::sys::fs::file_size(DiaPath, Size);
|
||||
if (EC)
|
||||
return false;
|
||||
return Size == 0;
|
||||
}
|
||||
|
||||
/// If a batch-constituent job happens to be batched together with a job
|
||||
/// that exits with an error, the batch-constituent may be considered
|
||||
/// "cancelled".
|
||||
bool jobIsCancelledBatchConstituent(int ReturnCode,
|
||||
const Job *ContainerJob,
|
||||
const Job *ConstituentJob) {
|
||||
return ReturnCode != 0 &&
|
||||
isBatchJob(ContainerJob) &&
|
||||
jobWasBatchedWithFailingJobs(ConstituentJob);
|
||||
}
|
||||
|
||||
/// Unpack a \c BatchJob that has finished into its constituent \c Job
|
||||
/// members, and call \c taskFinished on each, propagating any \c
|
||||
/// TaskFinishedResponse other than \c
|
||||
/// TaskFinishedResponse::ContinueExecution from any of the constituent
|
||||
/// calls.
|
||||
TaskFinishedResponse
|
||||
unpackAndFinishBatch(int ReturnCode, StringRef Output,
|
||||
StringRef Errors, const BatchJob *B) {
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Batch job finished: " << LogJob(B) << "\n";
|
||||
auto res = TaskFinishedResponse::ContinueExecution;
|
||||
for (const Job *J : B->getCombinedJobs()) {
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << " ==> Unpacked batch constituent finished: "
|
||||
<< LogJob(J) << "\n";
|
||||
auto r = taskFinished(
|
||||
llvm::sys::ProcessInfo::InvalidPid, ReturnCode, Output, Errors,
|
||||
TaskProcessInformation(llvm::sys::ProcessInfo::InvalidPid),
|
||||
(void *)J);
|
||||
if (r != TaskFinishedResponse::ContinueExecution)
|
||||
res = r;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
void
|
||||
emitParseableOutputForEachFinishedJob(ProcessId Pid, int ReturnCode,
|
||||
StringRef Output,
|
||||
@@ -451,20 +361,10 @@ namespace driver {
|
||||
TaskProcessInformation ProcInfo) {
|
||||
FinishedCmd->forEachContainedJobAndPID(Pid, [&](const Job *J,
|
||||
Job::PID P) {
|
||||
if (jobIsCancelledBatchConstituent(ReturnCode, FinishedCmd, J)) {
|
||||
// Simulate SIGINT-interruption to parseable-output consumer for any
|
||||
// constituent of a failing batch job that produced no errors of its
|
||||
// own.
|
||||
parseable_output::emitSignalledMessage(llvm::errs(),
|
||||
J->getSource().getClassName(),
|
||||
"cancelled batch constituent",
|
||||
"", SIGINT, P, ProcInfo);
|
||||
} else {
|
||||
parseable_output::emitFinishedMessage(llvm::errs(),
|
||||
J->getSource().getClassName(),
|
||||
Output.str(), ReturnCode,
|
||||
P, ProcInfo);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@@ -490,11 +390,6 @@ namespace driver {
|
||||
Comp.getStatsReporter()->recordJobMaxRSS(
|
||||
ProcInfo.getResourceUsage()->Maxrss);
|
||||
|
||||
if (isBatchJob(FinishedCmd)) {
|
||||
return unpackAndFinishBatch(ReturnCode, Output, Errors,
|
||||
static_cast<const BatchJob *>(FinishedCmd));
|
||||
}
|
||||
|
||||
if (ReturnCode != EXIT_SUCCESS)
|
||||
return taskFailed(FinishedCmd, ReturnCode);
|
||||
|
||||
@@ -522,12 +417,6 @@ namespace driver {
|
||||
ReturnCode);
|
||||
}
|
||||
|
||||
// See how ContinueBuildingAfterErrors gets set up in Driver.cpp for
|
||||
// more info.
|
||||
assert((Comp.getContinueBuildingAfterErrors() ||
|
||||
!Comp.getBatchModeEnabled()) &&
|
||||
"batch mode diagnostics require ContinueBuildingAfterErrors");
|
||||
|
||||
return Comp.getContinueBuildingAfterErrors()
|
||||
? TaskFinishedResponse::ContinueExecution
|
||||
: TaskFinishedResponse::StopExecution;
|
||||
@@ -636,17 +525,14 @@ namespace driver {
|
||||
: Comp(Comp),
|
||||
TQ(std::move(TaskQueue)) {}
|
||||
|
||||
/// Schedule and run initial, additional, and batch jobs.
|
||||
/// Schedule and run initial, additional, and single-file jobs.
|
||||
void runJobs() {
|
||||
scheduleJobsBeforeBatching();
|
||||
formBatchJobsAndAddPendingJobsToTaskQueue();
|
||||
scheduleJobsForNonIncrementalCompilation();
|
||||
addPendingJobsToTaskQueue();
|
||||
runTaskQueueToCompletion();
|
||||
}
|
||||
|
||||
private:
|
||||
void scheduleJobsBeforeBatching() {
|
||||
scheduleJobsForNonIncrementalCompilation();
|
||||
}
|
||||
|
||||
void scheduleJobsForNonIncrementalCompilation() {
|
||||
for (const Job *Cmd : Comp.getJobs())
|
||||
@@ -667,261 +553,9 @@ namespace driver {
|
||||
Cmds.clear();
|
||||
}
|
||||
|
||||
/// Partition the jobs in \c PendingExecution into those that are \p
|
||||
/// Batchable and those that are \p NonBatchable, clearing \p
|
||||
/// PendingExecution.
|
||||
void getPendingBatchableJobs(CommandSetVector &Batchable,
|
||||
CommandSetVector &NonBatchable) {
|
||||
for (const Job *Cmd : PendingExecution) {
|
||||
if (Comp.getToolChain().jobIsBatchable(Comp, Cmd)) {
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Batchable: " << LogJob(Cmd) << "\n";
|
||||
Batchable.insert(Cmd);
|
||||
} else {
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Not batchable: " << LogJob(Cmd) << "\n";
|
||||
NonBatchable.insert(Cmd);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// If \p Batch is nonempty, construct a new \c BatchJob from its
|
||||
/// contents by calling \p ToolChain::constructBatchJob, then insert the
|
||||
/// new \c BatchJob into \p Batches.
|
||||
void
|
||||
formBatchJobFromPartitionBatch(std::vector<const Job *> &Batches,
|
||||
std::vector<const Job *> const &Batch) {
|
||||
if (Batch.empty())
|
||||
return;
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Forming batch job from "
|
||||
<< Batch.size() << " constituents\n";
|
||||
auto const &TC = Comp.getToolChain();
|
||||
auto J = TC.constructBatchJob(Batch, NextBatchQuasiPID, Comp);
|
||||
if (J)
|
||||
Batches.push_back(Comp.addJob(std::move(J)));
|
||||
}
|
||||
|
||||
/// Build a vector of partition indices, one per Job: the i'th index says
|
||||
/// which batch of the partition the i'th Job will be assigned to. If we are
|
||||
/// shuffling due to -driver-batch-seed, the returned indices will not be
|
||||
/// arranged in contiguous runs. We shuffle partition-indices here, not
|
||||
/// elements themselves, to preserve the invariant that each batch is a
|
||||
/// subsequence of the full set of inputs, not just a subset.
|
||||
std::vector<size_t>
|
||||
assignJobsToPartitions(size_t PartitionSize,
|
||||
size_t NumJobs) {
|
||||
size_t Remainder = NumJobs % PartitionSize;
|
||||
size_t TargetSize = NumJobs / PartitionSize;
|
||||
std::vector<size_t> PartitionIndex;
|
||||
PartitionIndex.reserve(NumJobs);
|
||||
for (size_t P = 0; P < PartitionSize; ++P) {
|
||||
// Spread remainder evenly across partitions by adding 1 to the target
|
||||
// size of the first Remainder of them.
|
||||
size_t FillCount = TargetSize + ((P < Remainder) ? 1 : 0);
|
||||
std::fill_n(std::back_inserter(PartitionIndex), FillCount, P);
|
||||
}
|
||||
if (Comp.getBatchSeed() != 0) {
|
||||
std::minstd_rand gen(Comp.getBatchSeed());
|
||||
std::shuffle(PartitionIndex.begin(), PartitionIndex.end(), gen);
|
||||
}
|
||||
assert(PartitionIndex.size() == NumJobs);
|
||||
return PartitionIndex;
|
||||
}
|
||||
|
||||
/// Create \c NumberOfParallelCommands batches and assign each job to a
|
||||
/// batch either filling each partition in order or, if seeded with a
|
||||
/// nonzero value, pseudo-randomly (but deterministically and nearly-evenly).
|
||||
void partitionIntoBatches(const llvm::SmallVectorImpl<const Job *> &Batchable,
|
||||
BatchPartition &Partition) {
|
||||
if (Comp.getShowJobLifecycle()) {
|
||||
llvm::outs() << "Found " << Batchable.size() << " batchable jobs\n";
|
||||
llvm::outs() << "Forming into " << Partition.size() << " batches\n";
|
||||
}
|
||||
|
||||
assert(!Partition.empty());
|
||||
auto PartitionIndex = assignJobsToPartitions(Partition.size(),
|
||||
Batchable.size());
|
||||
assert(PartitionIndex.size() == Batchable.size());
|
||||
auto const &TC = Comp.getToolChain();
|
||||
for_each(Batchable, PartitionIndex, [&](const Job *Cmd, size_t Idx) {
|
||||
assert(Idx < Partition.size());
|
||||
std::vector<const Job*> &P = Partition[Idx];
|
||||
if (P.empty() || TC.jobsAreBatchCombinable(Comp, P[0], Cmd)) {
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Adding " << LogJob(Cmd)
|
||||
<< " to batch " << Idx << '\n';
|
||||
P.push_back(Cmd);
|
||||
} else {
|
||||
// Strange but theoretically possible that we have a batchable job
|
||||
// that's not combinable with others; tack a new batch on for it.
|
||||
if (Comp.getShowJobLifecycle())
|
||||
llvm::outs() << "Adding " << LogJob(Cmd)
|
||||
<< " to new batch " << Partition.size() << '\n';
|
||||
Partition.push_back(std::vector<const Job*>());
|
||||
Partition.back().push_back(Cmd);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Selects the number of partitions based on the user-provided batch
|
||||
// count and/or the number of parallel tasks we can run, subject to a
|
||||
// fixed per-batch safety cap, to avoid overcommitting memory.
|
||||
size_t pickNumberOfPartitions() {
|
||||
|
||||
// If the user asked for something, use that.
|
||||
if (Comp.getBatchCount().has_value())
|
||||
return Comp.getBatchCount().value();
|
||||
|
||||
// This is a long comment to justify a simple calculation.
|
||||
//
|
||||
// Because there is a secondary "outer" build system potentially also
|
||||
// scheduling multiple drivers in parallel on separate build targets
|
||||
// -- while we, the driver, schedule our own subprocesses -- we might
|
||||
// be creating up to $NCPU^2 worth of _memory pressure_.
|
||||
//
|
||||
// Oversubscribing CPU is typically no problem these days, but
|
||||
// oversubscribing memory can lead to paging, which on modern systems
|
||||
// is quite bad.
|
||||
//
|
||||
// In practice, $NCPU^2 processes doesn't _quite_ happen: as core
|
||||
// count rises, it usually exceeds the number of large targets
|
||||
// without any dependencies between them (which are the only thing we
|
||||
// have to worry about): you might have (say) 2 large independent
|
||||
// modules * 2 architectures, but that's only an $NTARGET value of 4,
|
||||
// which is much less than $NCPU if you're on a 24 or 36-way machine.
|
||||
//
|
||||
// So the actual number of concurrent processes is:
|
||||
//
|
||||
// NCONCUR := $NCPU * min($NCPU, $NTARGET)
|
||||
//
|
||||
// Empirically, a frontend uses about 512kb RAM per non-primary file
|
||||
// and about 10mb per primary. The number of non-primaries per
|
||||
// process is a constant in a given module, but the number of
|
||||
// primaries -- the "batch size" -- is inversely proportional to the
|
||||
// batch count (default: $NCPU). As a result, the memory pressure
|
||||
// we can expect is:
|
||||
//
|
||||
// $NCONCUR * (($NONPRIMARYMEM * $NFILE) +
|
||||
// ($PRIMARYMEM * ($NFILE/$NCPU)))
|
||||
//
|
||||
// If we tabulate this across some plausible values, we see
|
||||
// unfortunate memory-pressure results:
|
||||
//
|
||||
// $NFILE
|
||||
// +---------------------
|
||||
// $NTARGET $NCPU | 100 500 1000
|
||||
// ----------------+---------------------
|
||||
// 2 2 | 2gb 11gb 22gb
|
||||
// 4 4 | 4gb 24gb 48gb
|
||||
// 4 8 | 5gb 28gb 56gb
|
||||
// 4 16 | 7gb 36gb 72gb
|
||||
// 4 36 | 11gb 56gb 112gb
|
||||
//
|
||||
// As it happens, the lower parts of the table are dominated by
|
||||
// number of processes rather than the files-per-batch (the batches
|
||||
// are already quite small due to the high core count) and the left
|
||||
// side of the table is dealing with modules too small to worry
|
||||
// about. But the middle and upper-right quadrant is problematic: 4
|
||||
// and 8 core machines do not typically have 24-48gb of RAM, it'd be
|
||||
// nice not to page on them when building a 4-target project with
|
||||
// 500-file modules.
|
||||
//
|
||||
// Turns we can do that if we just cap the batch size statically at,
|
||||
// say, 25 files per batch, we get a better formula:
|
||||
//
|
||||
// $NCONCUR * (($NONPRIMARYMEM * $NFILE) +
|
||||
// ($PRIMARYMEM * min(25, ($NFILE/$NCPU))))
|
||||
//
|
||||
// $NFILE
|
||||
// +---------------------
|
||||
// $NTARGET $NCPU | 100 500 1000
|
||||
// ----------------+---------------------
|
||||
// 2 2 | 1gb 2gb 3gb
|
||||
// 4 4 | 4gb 8gb 12gb
|
||||
// 4 8 | 5gb 16gb 24gb
|
||||
// 4 16 | 7gb 32gb 48gb
|
||||
// 4 36 | 11gb 56gb 108gb
|
||||
//
|
||||
// This means that the "performance win" of batch mode diminishes
|
||||
// slightly: the batching factor in the equation drops from
|
||||
// ($NFILE/$NCPU) to min(25, $NFILE/$NCPU). In practice this seems to
|
||||
// not cost too much: the additional factor in number of subprocesses
|
||||
// run is the following:
|
||||
//
|
||||
// $NFILE
|
||||
// +---------------------
|
||||
// $NTARGET $NCPU | 100 500 1000
|
||||
// ----------------+---------------------
|
||||
// 2 2 | 2x 10x 20x
|
||||
// 4 4 | - 5x 10x
|
||||
// 4 8 | - 2.5x 5x
|
||||
// 4 16 | - 1.25x 2.5x
|
||||
// 4 36 | - - 1.1x
|
||||
//
|
||||
// Where - means "no difference" because the batches were already
|
||||
// smaller than 25.
|
||||
//
|
||||
// Even in the worst case here, the 1000-file module on 2-core
|
||||
// machine is being built with only 40 subprocesses, rather than the
|
||||
// pre-batch-mode 1000. I.e. it's still running 96% fewer
|
||||
// subprocesses than before. And significantly: it's doing so while
|
||||
// not exceeding the RAM of a typical 2-core laptop.
|
||||
|
||||
// An explanation of why the partition calculation isn't integer division.
// Using an example, a module of 26 files exceeds the limit of 25 and must
// be compiled in 2 batches. Integer division yields 26/25 = 1 batch, but
// a single batch of 26 exceeds the limit. The calculation must round up,
// which can be calculated using: `(x + y - 1) / y`
auto DivideRoundingUp = [](size_t Num, size_t Div) -> size_t {
return (Num + Div - 1) / Div;
};

size_t DefaultSizeLimit = 25;
size_t NumTasks = TQ->getNumberOfParallelTasks();
size_t NumFiles = PendingExecution.size();
size_t SizeLimit = Comp.getBatchSizeLimit().value_or(DefaultSizeLimit);
return std::max(NumTasks, DivideRoundingUp(NumFiles, SizeLimit));
}
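As a quick check of this rule with assumed numbers (8 parallel tasks, 500 pending files, the default 25-file cap), the driver would form max(8, ceil(500/25)) = 20 batches of 25 primaries each rather than 8 batches of roughly 63. A stand-alone verification of that arithmetic, not driver code:

#include <algorithm>
#include <cstddef>

constexpr size_t divideRoundingUp(size_t Num, size_t Div) {
  return (Num + Div - 1) / Div;
}
static_assert(std::max<size_t>(8, divideRoundingUp(500, 25)) == 20,
              "500 files with a 25-file cap and 8 tasks form 20 batches");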
/// Select jobs that are batch-combinable from \c PendingExecution, combine
|
||||
/// them together into \p BatchJob instances (also inserted into \p
|
||||
/// BatchJobs), and enqueue all \c PendingExecution jobs (whether batched or
|
||||
/// not) into the \c TaskQueue for execution.
|
||||
void formBatchJobsAndAddPendingJobsToTaskQueue() {
|
||||
|
||||
// If batch mode is not enabled, just transfer the set of pending jobs to
|
||||
// the task queue, as-is.
|
||||
if (!Comp.getBatchModeEnabled()) {
|
||||
transferJobsToTaskQueue(PendingExecution, "standard");
|
||||
return;
|
||||
}
|
||||
|
||||
size_t NumPartitions = pickNumberOfPartitions();
|
||||
CommandSetVector Batchable, NonBatchable;
|
||||
std::vector<const Job *> Batches;
|
||||
|
||||
// Split the batchable from non-batchable pending jobs.
|
||||
getPendingBatchableJobs(Batchable, NonBatchable);
|
||||
|
||||
// Partition the batchable jobs into sets.
|
||||
BatchPartition Partition(NumPartitions);
|
||||
partitionIntoBatches(Batchable.takeVector(), Partition);
|
||||
|
||||
// Construct a BatchJob from each batch in the partition.
|
||||
for (auto const &Batch : Partition) {
|
||||
formBatchJobFromPartitionBatch(Batches, Batch);
|
||||
}
|
||||
|
||||
PendingExecution.clear();
|
||||
|
||||
// Save batches so we can locate and decompose them on task-exit.
|
||||
for (const Job *Cmd : Batches)
|
||||
BatchJobs.insert(Cmd);
|
||||
|
||||
// Enqueue the resulting jobs, batched and non-batched alike.
|
||||
transferJobsToTaskQueue(Batches, "batch");
|
||||
transferJobsToTaskQueue(NonBatchable, "non-batch");
|
||||
void addPendingJobsToTaskQueue() {
|
||||
transferJobsToTaskQueue(PendingExecution, "standard");
|
||||
return;
|
||||
}
|
||||
|
||||
void runTaskQueueToCompletion() {
|
||||
@@ -958,14 +592,14 @@ namespace driver {
|
||||
// there are, we need to continue trying to make progress on the
|
||||
// TaskQueue before we start marking deferred jobs as skipped, below.
|
||||
if (!PendingExecution.empty() && ResultCode == 0) {
|
||||
formBatchJobsAndAddPendingJobsToTaskQueue();
|
||||
addPendingJobsToTaskQueue();
|
||||
continue;
|
||||
}
|
||||
|
||||
// It's possible that by marking some jobs as skipped, we unblocked
|
||||
// some jobs and thus have entries in PendingExecution again; push
|
||||
// those through to the TaskQueue.
|
||||
formBatchJobsAndAddPendingJobsToTaskQueue();
|
||||
addPendingJobsToTaskQueue();
|
||||
|
||||
// If we added jobs to the TaskQueue, and we are not in an error state,
|
||||
// we want to give the TaskQueue another run.
|
||||
|
||||
@@ -97,9 +97,9 @@ void Driver::parseDriverKind(ArrayRef<const char *> Args) {
std::optional<DriverKind> Kind =
llvm::StringSwitch<std::optional<DriverKind>>(DriverName)
.Case("swift", DriverKind::Interactive)
.Case("swiftc", DriverKind::Batch)
.Case("swiftc", DriverKind::Standard)
.Case("swift-legacy-driver", DriverKind::Interactive)
.Case("swiftc-legacy-driver", DriverKind::Batch)
.Case("swiftc-legacy-driver", DriverKind::Standard)
.Case("sil-opt", DriverKind::SILOpt)
.Case("sil-func-extractor", DriverKind::SILFuncExtractor)
.Case("sil-nm", DriverKind::SILNM)
@@ -462,33 +462,6 @@ static bool getFilelistThreshold(DerivedArgList &Args, size_t &FilelistThreshold
|
||||
return false;
|
||||
}
|
||||
|
||||
static unsigned
|
||||
getDriverBatchSeed(llvm::opt::InputArgList &ArgList,
|
||||
DiagnosticEngine &Diags) {
|
||||
unsigned DriverBatchSeed = 0;
|
||||
if (const Arg *A = ArgList.getLastArg(options::OPT_driver_batch_seed)) {
|
||||
if (StringRef(A->getValue()).getAsInteger(10, DriverBatchSeed)) {
|
||||
Diags.diagnose(SourceLoc(), diag::error_invalid_arg_value,
|
||||
A->getAsString(ArgList), A->getValue());
|
||||
}
|
||||
}
|
||||
return DriverBatchSeed;
|
||||
}
|
||||
|
||||
static std::optional<unsigned>
|
||||
getDriverBatchCount(llvm::opt::InputArgList &ArgList, DiagnosticEngine &Diags) {
|
||||
if (const Arg *A = ArgList.getLastArg(options::OPT_driver_batch_count)) {
|
||||
unsigned Count = 0;
|
||||
if (StringRef(A->getValue()).getAsInteger(10, Count)) {
|
||||
Diags.diagnose(SourceLoc(), diag::error_invalid_arg_value,
|
||||
A->getAsString(ArgList), A->getValue());
|
||||
} else {
|
||||
return Count;
|
||||
}
|
||||
}
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
static std::string
|
||||
computeWorkingDirectory(const llvm::opt::InputArgList *ArgList) {
|
||||
if (auto *A = ArgList->getLastArg(options::OPT_working_directory)) {
|
||||
@@ -535,38 +508,13 @@ createStatsReporter(const llvm::opt::InputArgList *ArgList,
|
||||
}
|
||||
|
||||
static bool
|
||||
computeContinueBuildingAfterErrors(const bool BatchMode,
|
||||
const llvm::opt::InputArgList *ArgList) {
|
||||
// Note: Batch mode handling of serialized diagnostics requires that all
|
||||
// batches get to run, in order to make sure that all diagnostics emitted
|
||||
// during the compilation end up in at least one serialized diagnostic file.
|
||||
// Therefore, treat batch mode as implying -continue-building-after-errors.
|
||||
// (This behavior could be limited to only when serialized diagnostics are
|
||||
// being emitted, but this seems more consistent and less surprising for
|
||||
// users.)
|
||||
computeContinueBuildingAfterErrors(const llvm::opt::InputArgList *ArgList) {
|
||||
// FIXME: We don't really need (or want) a full ContinueBuildingAfterErrors.
|
||||
// If we fail to precompile a bridging header, for example, there's no need
|
||||
// to go on to compilation of source files, and if compilation of source files
|
||||
// fails, we shouldn't try to link. Instead, we'd want to let all jobs finish
|
||||
// but not schedule any new ones.
|
||||
return BatchMode ||
|
||||
ArgList->hasArg(options::OPT_continue_building_after_errors);
|
||||
|
||||
}
|
||||
|
||||
static std::optional<unsigned>
|
||||
getDriverBatchSizeLimit(llvm::opt::InputArgList &ArgList,
|
||||
DiagnosticEngine &Diags) {
|
||||
if (const Arg *A = ArgList.getLastArg(options::OPT_driver_batch_size_limit)) {
|
||||
unsigned Limit = 0;
|
||||
if (StringRef(A->getValue()).getAsInteger(10, Limit)) {
|
||||
Diags.diagnose(SourceLoc(), diag::error_invalid_arg_value,
|
||||
A->getAsString(ArgList), A->getValue());
|
||||
} else {
|
||||
return Limit;
|
||||
}
|
||||
}
|
||||
return std::nullopt;
|
||||
return ArgList->hasArg(options::OPT_continue_building_after_errors);
|
||||
}
|
||||
|
||||
std::unique_ptr<Compilation>
|
||||
@@ -606,9 +554,8 @@ Driver::buildCompilation(const ToolChain &TC,
|
||||
|
||||
// Determine the OutputInfo for the driver.
|
||||
OutputInfo OI;
|
||||
bool BatchMode = false;
|
||||
OI.CompilerMode = computeCompilerMode(*TranslatedArgList, Inputs, BatchMode);
|
||||
buildOutputInfo(TC, *TranslatedArgList, BatchMode, Inputs, OI);
|
||||
OI.CompilerMode = computeCompilerMode(*TranslatedArgList, Inputs);
|
||||
buildOutputInfo(TC, *TranslatedArgList, Inputs, OI);
|
||||
|
||||
if (Diags.hadAnyError() && !AllowErrors)
|
||||
return nullptr;
|
||||
@@ -668,7 +615,7 @@ Driver::buildCompilation(const ToolChain &TC,
|
||||
const bool DriverPrintDerivedOutputFileMap =
|
||||
ArgList->hasArg(options::OPT_driver_print_derived_output_file_map);
|
||||
const bool ContinueBuildingAfterErrors =
|
||||
computeContinueBuildingAfterErrors(BatchMode, ArgList.get());
|
||||
computeContinueBuildingAfterErrors(ArgList.get());
|
||||
const bool ShowJobLifecycle =
|
||||
ArgList->hasArg(options::OPT_driver_show_job_lifecycle);
|
||||
|
||||
@@ -677,11 +624,6 @@ Driver::buildCompilation(const ToolChain &TC,
|
||||
// constructor in a block:
|
||||
std::unique_ptr<Compilation> C;
|
||||
{
|
||||
const unsigned DriverBatchSeed = getDriverBatchSeed(*ArgList, Diags);
|
||||
const std::optional<unsigned> DriverBatchCount =
|
||||
getDriverBatchCount(*ArgList, Diags);
|
||||
const std::optional<unsigned> DriverBatchSizeLimit =
|
||||
getDriverBatchSizeLimit(*ArgList, Diags);
|
||||
const bool SaveTemps = ArgList->hasArg(options::OPT_save_temps);
|
||||
const bool ShowDriverTimeCompilation =
|
||||
ArgList->hasArg(options::OPT_driver_time_compilation);
|
||||
@@ -699,10 +641,6 @@ Driver::buildCompilation(const ToolChain &TC,
|
||||
std::move(TranslatedArgList),
|
||||
std::move(Inputs),
|
||||
DriverFilelistThreshold,
|
||||
BatchMode,
|
||||
DriverBatchSeed,
|
||||
DriverBatchCount,
|
||||
DriverBatchSizeLimit,
|
||||
SaveTemps,
|
||||
ShowDriverTimeCompilation,
|
||||
std::move(StatsReporter),
|
||||
@@ -870,7 +808,7 @@ Driver::parseArgStrings(ArrayRef<const char *> Args) {
|
||||
unsigned UnsupportedFlag = 0;
|
||||
if (driverKind == DriverKind::Interactive)
|
||||
UnsupportedFlag = options::NoInteractiveOption;
|
||||
else if (driverKind == DriverKind::Batch)
|
||||
else if (driverKind == DriverKind::Standard)
|
||||
UnsupportedFlag = options::NoBatchOption;
|
||||
|
||||
if (UnsupportedFlag)
|
||||
@@ -1076,7 +1014,7 @@ static bool isSDKTooOld(StringRef sdkPath, const llvm::Triple &target) {
|
||||
}
|
||||
|
||||
void Driver::buildOutputInfo(const ToolChain &TC, const DerivedArgList &Args,
|
||||
const bool BatchMode, const InputFileList &Inputs,
|
||||
const InputFileList &Inputs,
|
||||
OutputInfo &OI) const {
|
||||
|
||||
if (const Arg *A = Args.getLastArg(options::OPT_lto)) {
|
||||
@@ -1109,9 +1047,7 @@ void Driver::buildOutputInfo(const ToolChain &TC, const DerivedArgList &Args,
|
||||
: CompilerOutputType;
|
||||
|
||||
if (const Arg *A = Args.getLastArg(options::OPT_num_threads)) {
|
||||
if (BatchMode) {
|
||||
Diags.diagnose(SourceLoc(), diag::warning_cannot_multithread_batch_mode);
|
||||
} else if (StringRef(A->getValue()).getAsInteger(10, OI.numThreads)) {
|
||||
if (StringRef(A->getValue()).getAsInteger(10, OI.numThreads)) {
|
||||
Diags.diagnose(SourceLoc(), diag::error_invalid_arg_value,
|
||||
A->getAsString(Args), A->getValue());
|
||||
}
|
||||
@@ -1509,8 +1445,7 @@ void Driver::buildOutputInfo(const ToolChain &TC, const DerivedArgList &Args,
|
||||
|
||||
OutputInfo::Mode
|
||||
Driver::computeCompilerMode(const DerivedArgList &Args,
|
||||
const InputFileList &Inputs,
|
||||
bool &BatchModeOut) const {
|
||||
const InputFileList &Inputs) const {
|
||||
|
||||
if (driverKind == Driver::DriverKind::Interactive)
|
||||
return Inputs.empty() ? OutputInfo::Mode::REPL
|
||||
@@ -1524,10 +1459,6 @@ Driver::computeCompilerMode(const DerivedArgList &Args,
|
||||
options::OPT_index_file,
|
||||
UseWMO ? options::OPT_whole_module_optimization : llvm::opt::OptSpecifier());
|
||||
|
||||
BatchModeOut = Args.hasFlag(options::OPT_enable_batch_mode,
|
||||
options::OPT_disable_batch_mode,
|
||||
false);
|
||||
|
||||
// AST dump doesn't work with `-wmo`. Since it's not common to want to dump
|
||||
// the AST, we assume that's the priority and ignore `-wmo`, but we warn the
|
||||
// user about this decision.
|
||||
@@ -1538,18 +1469,8 @@ Driver::computeCompilerMode(const DerivedArgList &Args,
|
||||
return OutputInfo::Mode::StandardCompile;
|
||||
}
|
||||
|
||||
// Override batch mode if given -wmo or -index-file.
|
||||
if (ArgRequiringSingleCompile) {
|
||||
if (BatchModeOut) {
|
||||
BatchModeOut = false;
|
||||
// Emit a warning about such overriding (FIXME: we might conditionalize
|
||||
// this based on the user or xcode passing -disable-batch-mode).
|
||||
Diags.diagnose(SourceLoc(), diag::warn_ignoring_batch_mode,
|
||||
ArgRequiringSingleCompile->getOption().getPrefixedName());
|
||||
}
|
||||
if (ArgRequiringSingleCompile)
|
||||
return OutputInfo::Mode::SingleCompile;
|
||||
}
|
||||
|
||||
return OutputInfo::Mode::StandardCompile;
|
||||
}
|
||||
|
||||
@@ -1792,9 +1713,6 @@ void Driver::buildActions(SmallVectorImpl<const Action *> &TopLevelActions,
|
||||
AllLinkerInputs.push_back(CA);
|
||||
break;
|
||||
}
|
||||
case OutputInfo::Mode::BatchModeCompile: {
|
||||
llvm_unreachable("Batch mode should not be used to build actions");
|
||||
}
|
||||
case OutputInfo::Mode::Immediate: {
|
||||
if (Inputs.empty())
|
||||
return;
|
||||
@@ -2709,15 +2627,6 @@ void Driver::computeMainOutput(
|
||||
file_types::isAfterLLVM(JA->getType())) {
|
||||
// Multi-threaded compilation: A single frontend command produces multiple
|
||||
// output file: one for each input files.
|
||||
|
||||
// In batch mode, the driver will try to reserve multiple differing main
|
||||
// outputs to a bridging header. Another assertion will trip, but the cause
|
||||
// will be harder to track down. Since the driver now ignores -num-threads
|
||||
// in batch mode, the user should never be able to falsify this assertion.
|
||||
assert(!C.getBatchModeEnabled() && "Batch mode produces only one main "
|
||||
"output per input action, cannot have "
|
||||
"batch mode & num-threads");
|
||||
|
||||
auto OutputFunc = [&](StringRef Base, StringRef Primary) {
|
||||
const TypeToPathMap *OMForInput = nullptr;
|
||||
if (OFM)
|
||||
@@ -2914,7 +2823,6 @@ void Driver::chooseModuleInterfacePath(Compilation &C, const JobAction *JA,
|
||||
CommandOutput *output) const {
|
||||
switch (C.getOutputInfo().CompilerMode) {
|
||||
case OutputInfo::Mode::StandardCompile:
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
if (!isa<MergeModuleJobAction>(JA))
|
||||
return;
|
||||
break;
|
||||
@@ -3146,7 +3054,7 @@ void Driver::printHelp(bool ShowHidden) const {
|
||||
case DriverKind::Interactive:
|
||||
ExcludedFlagsBitmask |= options::NoInteractiveOption;
|
||||
break;
|
||||
case DriverKind::Batch:
|
||||
case DriverKind::Standard:
|
||||
case DriverKind::SILOpt:
|
||||
case DriverKind::SILFuncExtractor:
|
||||
case DriverKind::SILNM:
|
||||
@@ -3190,7 +3098,6 @@ bool OutputInfo::mightHaveExplicitPrimaryInputs(
|
||||
const CommandOutput &Output) const {
|
||||
switch (CompilerMode) {
|
||||
case Mode::StandardCompile:
|
||||
case Mode::BatchModeCompile:
|
||||
return true;
|
||||
case Mode::SingleCompile:
|
||||
return false;
|
||||
|
||||
@@ -486,21 +486,3 @@ StringRef Job::getFirstSwiftPrimaryInput() const {
|
||||
return inputInput->getInputArg().getValue();
|
||||
return StringRef();
|
||||
}
|
||||
|
||||
BatchJob::BatchJob(const JobAction &Source,
|
||||
SmallVectorImpl<const Job *> &&Inputs,
|
||||
std::unique_ptr<CommandOutput> Output,
|
||||
const char *Executable, llvm::opt::ArgStringList Arguments,
|
||||
EnvironmentVector ExtraEnvironment,
|
||||
std::vector<FilelistInfo> Infos,
|
||||
ArrayRef<const Job *> Combined, int64_t &NextQuasiPID,
|
||||
std::optional<ResponseFileInfo> ResponseFile)
|
||||
: Job(Source, std::move(Inputs), std::move(Output), Executable, Arguments,
|
||||
ExtraEnvironment, Infos, ResponseFile),
|
||||
CombinedJobs(Combined.begin(), Combined.end()),
|
||||
QuasiPIDBase(NextQuasiPID) {
|
||||
|
||||
assert(QuasiPIDBase < 0);
|
||||
NextQuasiPID -= CombinedJobs.size();
|
||||
assert(NextQuasiPID < 0);
|
||||
}
|
||||
|
||||
@@ -219,80 +219,6 @@ static bool jobsHaveSameEnvironment(const Job *A, const Job *B) {
|
||||
return true;
|
||||
}
|
||||
|
||||
bool ToolChain::jobIsBatchable(const Compilation &C, const Job *A) const {
|
||||
// FIXME: There might be a tighter criterion to use here?
|
||||
if (C.getOutputInfo().CompilerMode != OutputInfo::Mode::StandardCompile)
|
||||
return false;
|
||||
auto const *CJActA = dyn_cast<const CompileJobAction>(&A->getSource());
|
||||
if (!CJActA)
|
||||
return false;
|
||||
// When having only one job output a dependency file, that job is not
|
||||
// batchable since it has an oddball set of additional output types.
|
||||
if (C.OnlyOneDependencyFile &&
|
||||
A->getOutput().hasAdditionalOutputForType(file_types::TY_Dependencies))
|
||||
return false;
|
||||
return CJActA->findSingleSwiftInput() != nullptr;
|
||||
}
|
||||
|
||||
bool ToolChain::jobsAreBatchCombinable(const Compilation &C, const Job *A,
|
||||
const Job *B) const {
|
||||
assert(jobIsBatchable(C, A));
|
||||
assert(jobIsBatchable(C, B));
|
||||
return (jobsHaveSameExecutableNames(A, B) && jobsHaveSameOutputTypes(A, B) &&
|
||||
jobsHaveSameEnvironment(A, B));
|
||||
}
|
||||
|
||||
/// Form a synthetic \c CommandOutput for a \c BatchJob by merging together the
|
||||
/// \c CommandOutputs of all the jobs passed.
|
||||
static std::unique_ptr<CommandOutput>
|
||||
makeBatchCommandOutput(ArrayRef<const Job *> jobs, Compilation &C,
|
||||
file_types::ID outputType) {
|
||||
auto output =
|
||||
std::make_unique<CommandOutput>(outputType, C.getDerivedOutputFileMap());
|
||||
for (auto const *J : jobs) {
|
||||
output->addOutputs(J->getOutput());
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
/// Set-union the \c Inputs and \c InputActions from each \c Job in \p jobs into
|
||||
/// the provided \p inputJobs and \p inputActions vectors, further adding all \c
|
||||
/// Actions in the \p jobs -- InputActions or otherwise -- to \p batchCJA. Do
|
||||
/// set-union rather than concatenation here to avoid mentioning the same input
|
||||
/// multiple times.
|
||||
static bool
|
||||
mergeBatchInputs(ArrayRef<const Job *> jobs,
|
||||
llvm::SmallSetVector<const Job *, 16> &inputJobs,
|
||||
llvm::SmallSetVector<const Action *, 16> &inputActions,
|
||||
CompileJobAction *batchCJA) {
|
||||
|
||||
llvm::SmallSetVector<const Action *, 16> allActions;
|
||||
|
||||
for (auto const *J : jobs) {
|
||||
for (auto const *I : J->getInputs()) {
|
||||
inputJobs.insert(I);
|
||||
}
|
||||
auto const *CJA = dyn_cast<CompileJobAction>(&J->getSource());
|
||||
if (!CJA)
|
||||
return true;
|
||||
for (auto const *I : CJA->getInputs()) {
|
||||
// Capture _all_ input actions -- whether or not they are InputActions --
|
||||
// in allActions, to set as the inputs for batchCJA below.
|
||||
allActions.insert(I);
|
||||
// Only collect input actions that _are InputActions_ in the inputActions
|
||||
// array, to load into the JobContext in our caller.
|
||||
if (auto const *IA = dyn_cast<InputAction>(I)) {
|
||||
inputActions.insert(IA);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (auto const *I : allActions) {
|
||||
batchCJA->addInput(I);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void ToolChain::addLinkedLibArgs(const llvm::opt::ArgList &Args,
|
||||
llvm::opt::ArgStringList &FrontendArgs) {
|
||||
Args.getLastArg(options::OPT_l);
|
||||
@@ -302,62 +228,6 @@ void ToolChain::addLinkedLibArgs(const llvm::opt::ArgList &Args,
|
||||
}
|
||||
}
|
||||
|
||||
/// Construct a \c BatchJob by merging the constituent \p jobs' CommandOutput,
|
||||
/// input \c Job and \c Action members. Call through to \c constructInvocation
|
||||
/// on \p BatchJob, to build the \c InvocationInfo.
|
||||
std::unique_ptr<Job>
|
||||
ToolChain::constructBatchJob(ArrayRef<const Job *> unsortedJobs,
|
||||
Job::PID &NextQuasiPID,
|
||||
Compilation &C) const {
|
||||
if (unsortedJobs.empty())
|
||||
return nullptr;
|
||||
|
||||
llvm::SmallVector<const Job *, 16> sortedJobs;
|
||||
C.sortJobsToMatchCompilationInputs(unsortedJobs, sortedJobs);
|
||||
|
||||
// Synthetic OutputInfo is a slightly-modified version of the initial
|
||||
// compilation's OI.
|
||||
auto OI = C.getOutputInfo();
|
||||
OI.CompilerMode = OutputInfo::Mode::BatchModeCompile;
|
||||
|
||||
auto const *executablePath = sortedJobs[0]->getExecutable();
|
||||
auto outputType = sortedJobs[0]->getOutput().getPrimaryOutputType();
|
||||
auto output = makeBatchCommandOutput(sortedJobs, C, outputType);
|
||||
|
||||
llvm::SmallSetVector<const Job *, 16> inputJobs;
|
||||
llvm::SmallSetVector<const Action *, 16> inputActions;
|
||||
auto *batchCJA = C.createAction<CompileJobAction>(outputType);
|
||||
if (mergeBatchInputs(sortedJobs, inputJobs, inputActions, batchCJA))
|
||||
return nullptr;
|
||||
|
||||
JobContext context{C, inputJobs.getArrayRef(), inputActions.getArrayRef(),
|
||||
*output, OI};
|
||||
auto invocationInfo = constructInvocation(*batchCJA, context);
|
||||
// Batch mode can produce quite long command lines; in almost every case these
|
||||
// will trigger use of supplementary output file maps. However, if the driver
|
||||
// command line is long for reasons unrelated to the number of input files,
|
||||
// such as passing a large number of flags, then the individual batch jobs are
|
||||
// also likely to overflow. We have to check for that explicitly here, because
|
||||
// the BatchJob created here does not go through the same code path in
|
||||
// constructJob above.
|
||||
//
|
||||
// The `allowsResponseFiles` flag on the `invocationInfo` we have here exists
|
||||
// only to model external tools that don't know about response files, such as
|
||||
// platform linkers; when talking to the frontend (which we control!) it
|
||||
// should always be true. But double check with an assert here in case someone
|
||||
// failed to set it in `constructInvocation`.
|
||||
assert(invocationInfo.allowsResponseFiles);
|
||||
auto responseFileInfo =
|
||||
getResponseFileInfo(C, executablePath, invocationInfo, context);
|
||||
|
||||
return std::make_unique<BatchJob>(
|
||||
*batchCJA, inputJobs.takeVector(), std::move(output), executablePath,
|
||||
std::move(invocationInfo.Arguments),
|
||||
std::move(invocationInfo.ExtraEnvironment),
|
||||
std::move(invocationInfo.FilelistInfos), sortedJobs, NextQuasiPID,
|
||||
responseFileInfo);
|
||||
}
|
||||
|
||||
llvm::Expected<file_types::ID>
|
||||
ToolChain::remarkFileTypeFromArgs(const llvm::opt::ArgList &Args) const {
|
||||
const Arg *A = Args.getLastArg(options::OPT_save_optimization_record_EQ);
|
||||
|
||||
@@ -174,7 +174,6 @@ void ToolChain::addCommonFrontendArgs(const OutputInfo &OI,
|
||||
LLVM_FALLTHROUGH;
|
||||
case OutputInfo::Mode::StandardCompile:
|
||||
case OutputInfo::Mode::SingleCompile:
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
arguments.push_back("-target");
|
||||
arguments.push_back(inputArgs.MakeArgString(Triple.str()));
|
||||
break;
|
||||
@@ -551,7 +550,6 @@ ToolChain::constructInvocation(const CompileJobAction &job,
|
||||
context.Args.AddLastArg(Arguments, options::OPT_pch_output_dir);
|
||||
switch (context.OI.CompilerMode) {
|
||||
case OutputInfo::Mode::StandardCompile:
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
// In the 'multiple invocations for each file' mode we don't need to
|
||||
// validate the PCH every time, it has been validated with the initial
|
||||
// -emit-pch invocation.
|
||||
@@ -717,7 +715,6 @@ const char *ToolChain::JobContext::computeFrontendModeForCompile() const {
|
||||
switch (OI.CompilerMode) {
|
||||
case OutputInfo::Mode::StandardCompile:
|
||||
case OutputInfo::Mode::SingleCompile:
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
break;
|
||||
case OutputInfo::Mode::Immediate:
|
||||
case OutputInfo::Mode::REPL:
|
||||
@@ -815,7 +812,6 @@ void ToolChain::JobContext::addFrontendInputAndOutputArguments(
|
||||
assert(InputActions.size() == 1 &&
|
||||
"Standard-compile mode takes exactly one input (the primary file)");
|
||||
break;
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
case OutputInfo::Mode::SingleCompile:
|
||||
break;
|
||||
case OutputInfo::Mode::Immediate:
|
||||
@@ -1082,7 +1078,6 @@ ToolChain::constructInvocation(const BackendJobAction &job,
|
||||
}
|
||||
break;
|
||||
}
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
case OutputInfo::Mode::Immediate:
|
||||
case OutputInfo::Mode::REPL:
|
||||
llvm_unreachable("invalid mode for backend job");
|
||||
@@ -1114,7 +1109,6 @@ ToolChain::constructInvocation(const BackendJobAction &job,
|
||||
context.Args.MakeArgString(OutNames[job.getInputIndex()]));
|
||||
break;
|
||||
}
|
||||
case OutputInfo::Mode::BatchModeCompile:
|
||||
case OutputInfo::Mode::Immediate:
|
||||
case OutputInfo::Mode::REPL:
|
||||
llvm_unreachable("invalid mode for backend job");
|
||||
|
||||
@@ -1,138 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift
|
||||
// RUN: touch %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift %t/file-10.swift
|
||||
// RUN: touch %t/file-11.swift %t/file-12.swift %t/file-13.swift %t/file-14.swift %t/file-15.swift
|
||||
// RUN: touch %t/file-16.swift %t/file-17.swift %t/file-18.swift %t/file-19.swift %t/file-20.swift
|
||||
// RUN: touch %t/file-21.swift %t/file-22.swift %t/file-23.swift %t/file-24.swift %t/file-25.swift
|
||||
// RUN: touch %t/file-26.swift %t/file-27.swift %t/file-28.swift %t/file-29.swift %t/file-30.swift
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -driver-show-job-lifecycle -driver-skip-execution -j 4 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift %t/file-10.swift %t/file-11.swift %t/file-12.swift %t/file-13.swift %t/file-14.swift %t/file-15.swift %t/file-16.swift %t/file-17.swift %t/file-18.swift %t/file-19.swift %t/file-20.swift %t/file-21.swift %t/file-22.swift %t/file-23.swift %t/file-24.swift %t/file-25.swift %t/file-26.swift %t/file-27.swift %t/file-28.swift %t/file-29.swift %t/file-30.swift | %FileCheck %s -check-prefix=SEED0
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -driver-show-job-lifecycle -driver-skip-execution -j 4 -driver-batch-seed 1 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift %t/file-10.swift %t/file-11.swift %t/file-12.swift %t/file-13.swift %t/file-14.swift %t/file-15.swift %t/file-16.swift %t/file-17.swift %t/file-18.swift %t/file-19.swift %t/file-20.swift %t/file-21.swift %t/file-22.swift %t/file-23.swift %t/file-24.swift %t/file-25.swift %t/file-26.swift %t/file-27.swift %t/file-28.swift %t/file-29.swift %t/file-30.swift | %FileCheck %s -check-prefix=SEED1
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -driver-show-job-lifecycle -driver-skip-execution -j 4 -driver-batch-seed 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift %t/file-10.swift %t/file-11.swift %t/file-12.swift %t/file-13.swift %t/file-14.swift %t/file-15.swift %t/file-16.swift %t/file-17.swift %t/file-18.swift %t/file-19.swift %t/file-20.swift %t/file-21.swift %t/file-22.swift %t/file-23.swift %t/file-24.swift %t/file-25.swift %t/file-26.swift %t/file-27.swift %t/file-28.swift %t/file-29.swift %t/file-30.swift | %FileCheck %s -check-prefix=SEED2
|
||||
//
|
||||
// 30 files / 4 batches => 2 batches of 8 jobs + 2 batches of 7 jobs
|
||||
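The split above is simple arithmetic: 30 = 4 * 7 with remainder 2, so two of the
four batches receive 8 constituents and the other two receive 7. A minimal Swift
sketch of that size calculation (an illustration only, not the legacy driver's
actual partitioning code):

    // Distribute `jobs` over `batches` so that batch sizes differ by at most one,
    // larger batches first; batchSizes(jobs: 30, batches: 4) == [8, 8, 7, 7].
    func batchSizes(jobs: Int, batches: Int) -> [Int] {
        let base = jobs / batches   // 7
        let extra = jobs % batches  // 2
        return (0..<batches).map { $0 < extra ? base + 1 : base }
    }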
//
|
||||
// SEED0: Found 30 batchable jobs
|
||||
// SEED0: Forming into 4 batches
|
||||
// SEED0: Adding {compile: {{file-01-.*}} <= file-01.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-02-.*}} <= file-02.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-03-.*}} <= file-03.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-04-.*}} <= file-04.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-05-.*}} <= file-05.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-06-.*}} <= file-06.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-07-.*}} <= file-07.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-08-.*}} <= file-08.swift} to batch 0
|
||||
// SEED0: Adding {compile: {{file-09-.*}} <= file-09.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-10-.*}} <= file-10.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-11-.*}} <= file-11.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-12-.*}} <= file-12.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-13-.*}} <= file-13.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-14-.*}} <= file-14.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-15-.*}} <= file-15.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-16-.*}} <= file-16.swift} to batch 1
|
||||
// SEED0: Adding {compile: {{file-17-.*}} <= file-17.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-18-.*}} <= file-18.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-19-.*}} <= file-19.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-20-.*}} <= file-20.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-21-.*}} <= file-21.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-22-.*}} <= file-22.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-23-.*}} <= file-23.swift} to batch 2
|
||||
// SEED0: Adding {compile: {{file-24-.*}} <= file-24.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-25-.*}} <= file-25.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-26-.*}} <= file-26.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-27-.*}} <= file-27.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-28-.*}} <= file-28.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-29-.*}} <= file-29.swift} to batch 3
|
||||
// SEED0: Adding {compile: {{file-30-.*}} <= file-30.swift} to batch 3
|
||||
// SEED0: Forming batch job from 8 constituents
|
||||
// SEED0: Forming batch job from 8 constituents
|
||||
// SEED0: Forming batch job from 7 constituents
|
||||
// SEED0: Forming batch job from 7 constituents
|
||||
// SEED0: Adding batch job to task queue: {compile: file-01{{.*}} file-02{{.*}} file-03{{.*}} ... 5 more <= file-01.swift file-02.swift file-03.swift ... 5 more}
|
||||
// SEED0: Adding batch job to task queue: {compile: file-09{{.*}} file-10{{.*}} file-11{{.*}} ... 5 more <= file-09.swift file-10.swift file-11.swift ... 5 more}
|
||||
// SEED0: Adding batch job to task queue: {compile: file-17{{.*}} file-18{{.*}} file-19{{.*}} ... 4 more <= file-17.swift file-18.swift file-19.swift ... 4 more}
|
||||
// SEED0: Adding batch job to task queue: {compile: file-24{{.*}} file-25{{.*}} file-26{{.*}} ... 4 more <= file-24.swift file-25.swift file-26.swift ... 4 more}
|
||||
//
|
||||
// SEED1: Found 30 batchable jobs
|
||||
// SEED1: Forming into 4 batches
|
||||
// SEED1: Adding {compile: {{file-01-.*}} <= file-01.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-02-.*}} <= file-02.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-03-.*}} <= file-03.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-04-.*}} <= file-04.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-05-.*}} <= file-05.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-06-.*}} <= file-06.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-07-.*}} <= file-07.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-08-.*}} <= file-08.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-09-.*}} <= file-09.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-10-.*}} <= file-10.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-11-.*}} <= file-11.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-12-.*}} <= file-12.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-13-.*}} <= file-13.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-14-.*}} <= file-14.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-15-.*}} <= file-15.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-16-.*}} <= file-16.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-17-.*}} <= file-17.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-18-.*}} <= file-18.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-19-.*}} <= file-19.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-20-.*}} <= file-20.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-21-.*}} <= file-21.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-22-.*}} <= file-22.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-23-.*}} <= file-23.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-24-.*}} <= file-24.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-25-.*}} <= file-25.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-26-.*}} <= file-26.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-27-.*}} <= file-27.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-28-.*}} <= file-28.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-29-.*}} <= file-29.swift} to batch {{[0-3]}}
|
||||
// SEED1: Adding {compile: {{file-30-.*}} <= file-30.swift} to batch {{[0-3]}}
|
||||
// SEED1: Forming batch job from 8 constituents
|
||||
// SEED1: Forming batch job from 8 constituents
|
||||
// SEED1: Forming batch job from 7 constituents
|
||||
// SEED1: Forming batch job from 7 constituents
|
||||
// SEED1-NOT: Adding batch job to task queue: {compile: file-01{{.*}} file-02{{.*}} file-03{{.*}} ... 5 more <= file-01.swift file-02.swift file-03.swift ... 5 more }
|
||||
// SEED1: Added to TaskQueue: {compile: {{.*}} <= {{file-[0-3][2-9].swift .*}}}
|
||||
// SEED1: Added to TaskQueue: {compile: {{.*}} <= {{.*}}}
|
||||
// SEED1: Added to TaskQueue: {compile: {{.*}} <= {{.*}}}
|
||||
//
|
||||
// SEED2: Found 30 batchable jobs
|
||||
// SEED2: Forming into 4 batches
|
||||
// SEED2: Adding {compile: {{file-01-.*}} <= file-01.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-02-.*}} <= file-02.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-03-.*}} <= file-03.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-04-.*}} <= file-04.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-05-.*}} <= file-05.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-06-.*}} <= file-06.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-07-.*}} <= file-07.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-08-.*}} <= file-08.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-09-.*}} <= file-09.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-10-.*}} <= file-10.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-11-.*}} <= file-11.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-12-.*}} <= file-12.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-13-.*}} <= file-13.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-14-.*}} <= file-14.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-15-.*}} <= file-15.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-16-.*}} <= file-16.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-17-.*}} <= file-17.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-18-.*}} <= file-18.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-19-.*}} <= file-19.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-20-.*}} <= file-20.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-21-.*}} <= file-21.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-22-.*}} <= file-22.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-23-.*}} <= file-23.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-24-.*}} <= file-24.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-25-.*}} <= file-25.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-26-.*}} <= file-26.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-27-.*}} <= file-27.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-28-.*}} <= file-28.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-29-.*}} <= file-29.swift} to batch {{[0-3]}}
|
||||
// SEED2: Adding {compile: {{file-30-.*}} <= file-30.swift} to batch {{[0-3]}}
|
||||
// SEED2: Forming batch job from 8 constituents
|
||||
// SEED2: Forming batch job from 8 constituents
|
||||
// SEED2: Forming batch job from 7 constituents
|
||||
// SEED2: Forming batch job from 7 constituents
|
||||
// SEED2-NOT: Adding batch job to task queue: {compile: file-01{{.*}} file-02{{.*}} file-03{{.*}} ... 5 more <= file-01.swift file-02.swift file-03.swift ... 5 more }
|
||||
// SEED2: Added to TaskQueue: {compile: {{.*}} <= {{file-[0-3][2-9].swift .*}}}
|
||||
// SEED2: Added to TaskQueue: {compile: {{.*}} <= {{.*}}}
|
||||
// SEED2: Added to TaskQueue: {compile: {{.*}} <= {{.*}}}
|
||||
@@ -1,20 +0,0 @@
|
||||
// When multiple additional-outputs on the same command line are no longer
// supported (i.e. when we've moved to mandatory use of output file maps for
// communicating multiple additional-outputs to frontends), this test will no
// longer make sense and should be removed.
|
||||
//
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift
|
||||
// RUN: touch %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -driver-batch-seed 1 -driver-print-jobs -driver-skip-execution -j 3 -emit-module -module-name foo %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/file-04.swift %t/file-05.swift %t/file-06.swift %t/file-07.swift %t/file-08.swift %t/file-09.swift >%t/out.txt
|
||||
// RUN: %FileCheck %s <%t/out.txt
|
||||
//
|
||||
// Each batch should get 3 primaries; check that each has 3 modules _in the same numeric order_.
|
||||
//
|
||||
// CHECK: {{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[A1:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[A2:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[A3:[0-9]+]].swift{{"?}}
|
||||
// CHECK-SAME: -o {{.*[\\/]}}file-[[A1]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[A2]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[A3]]-{{[a-z0-9]+}}.swiftmodule{{"?}}
|
||||
// CHECK: {{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[B1:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[B2:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[B3:[0-9]+]].swift{{"?}}
|
||||
// CHECK-SAME: -o {{.*[\\/]}}file-[[B1]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[B2]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[B3]]-{{[a-z0-9]+}}.swiftmodule{{"?}}
|
||||
// CHECK: {{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[C1:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[C2:[0-9]+]].swift{{"?}} {{.*}}-primary-file {{[^ ]*[\\/]}}file-[[C3:[0-9]+]].swift{{"?}}
|
||||
// CHECK-SAME: -o {{.*[\\/]}}file-[[C1]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[C2]]-{{[a-z0-9]+}}.swiftmodule{{"?}} -o {{.*[\\/]}}file-[[C3]]-{{[a-z0-9]+}}.swiftmodule{{"?}}
|
||||
@@ -1,25 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift
|
||||
// RUN: echo 'public func main() {}' >%t/main.swift
|
||||
// RUN: echo 'extern int foo;' >%t/foo-bridging-header.h
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-bridging-pch -v -import-objc-header %t/foo-bridging-header.h -enable-batch-mode -c -emit-module -module-name main -j 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/main.swift %s 2>&1 | %FileCheck %s
|
||||
//
|
||||
// Next we make a module map with an unknown attribute, which will cause an
// AST-reader warning while (re)parsing the module map when attaching a PCH.
// We turn on serialized diagnostics in the frontends, and check that that
// warning, issued before the batch-mode multi-file diagnostic multiplexor has
// its file mappings established, does not crash the multiplexor.
|
||||
//
|
||||
// RUN: %empty-directory(%t/MyModule)
|
||||
// RUN: echo 'module MyModule [DefinitelyNotAnAttribute] { header "header.h" export * }' >%t/MyModule/module.modulemap
|
||||
// RUN: touch %t/MyModule/header.h
|
||||
// RUN: echo '#include "MyModule/header.h"' >>%t/foo-bridging-header.h
|
||||
// RUN: %swiftc_driver -enable-bridging-pch -v -I %t -import-objc-header %t/foo-bridging-header.h -enable-batch-mode -c -emit-module -module-name main -j 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/main.swift -serialize-diagnostics %s 2>&1 | %FileCheck %s
|
||||
//
|
||||
// CHECK: -emit-pch
|
||||
// CHECK: -primary-file {{.*}}/file-01.swift{{"?}} -primary-file {{.*}}/file-02.swift{{"?}}
|
||||
|
||||
func bar() {
|
||||
print(foo)
|
||||
}
|
||||
@@ -1,306 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift
|
||||
// RUN: echo 'public func main() {}' >%t/main.swift
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -parseable-output -driver-skip-execution -c -emit-module -module-name main -j 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/main.swift 2>&1 | %FileCheck -check-prefix CHECK %s
|
||||
//
|
||||
//
|
||||
// CHECK: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "began",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "command": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}} -frontend -c -primary-file {{.*}}/file-01.swift{{(\\")?}} {{.*}}file-02.swift{{(\\")?}} {{.*}}file-03.swift{{(\\")?}} {{.*}}main.swift{{(\\")?}} -emit-module-path {{.*}}file-01-[[MODULE01:[a-z0-9]+]].swiftmodule{{(\\")?}} -emit-module-doc-path {{.*}}file-01-[[SWIFTDOC01:[a-z0-9]+]].swiftdoc{{(\\")?}} {{.*}} {{.*}} -module-name main -o {{.*}}file-01-[[OBJ01:[a-z0-9]+]].o{{(\\")?}}",
|
||||
// CHECK-NEXT: "command_executable": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?}}",
|
||||
// CHECK-NEXT: "command_arguments": [
|
||||
// CHECK-NEXT: "-frontend",
|
||||
// CHECK-NEXT: "-c",
|
||||
// CHECK-NEXT: "-primary-file",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main.swift",
|
||||
// CHECK-NEXT: "-emit-module-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01-[[MODULE01:[a-z0-9]+]].swiftmodule",
|
||||
// CHECK-NEXT: "-emit-module-doc-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01-[[SWIFTDOC01:[a-z0-9]+]].swiftdoc",
|
||||
// CHECK: "-module-name",
|
||||
// CHECK-NEXT: "main",
|
||||
// CHECK-NEXT: "-o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01-[[OBJ01:[a-z0-9]+]].o"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "inputs": [
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01.swift"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "outputs": [
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "object",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-01-[[OBJ01]].o"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftmodule",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-01-[[MODULE01]].swiftmodule"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftdoc",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-01-[[SWIFTDOC01]].swiftdoc"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftsourceinfo",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-01-[[MODULE01]].swiftsourceinfo"
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: }
|
||||
// CHECK: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "began",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "command": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}} -frontend -c {{.*}}file-01.swift{{(\\")?}} -primary-file {{.*}}file-02.swift{{(\\")?}} {{.*}}file-03.swift{{(\\")?}} {{.*}}main.swift{{(\\")?}} -emit-module-path {{.*}}file-02-[[MODULE02:[a-z0-9]+]].swiftmodule{{(\\")?}} -emit-module-doc-path {{.*}}file-02-[[SWIFTDOC02:[a-z0-9]+]].swiftdoc{{(\\")?}} {{.*}} {{.*}} -module-name main -o {{.*}}file-02-[[OBJ02:[a-z0-9]+]].o{{(\\")?}}",
|
||||
// CHECK-NEXT: "command_executable": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}}",
|
||||
// CHECK-NEXT: "command_arguments": [
|
||||
// CHECK-NEXT: "-frontend",
|
||||
// CHECK-NEXT: "-c",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01.swift",
|
||||
// CHECK-NEXT: "-primary-file",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main.swift",
|
||||
// CHECK-NEXT: "-emit-module-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02-[[MODULE02:[a-z0-9]+]].swiftmodule",
|
||||
// CHECK-NEXT: "-emit-module-doc-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02-[[SWIFTDOC02:[a-z0-9]+]].swiftdoc",
|
||||
// CHECK: "-module-name",
|
||||
// CHECK-NEXT: "main",
|
||||
// CHECK-NEXT: "-o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02-[[OBJ02:[a-z0-9]+]].o"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "inputs": [
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02.swift"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "outputs": [
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "object",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-02-[[OBJ02]].o"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftmodule",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-02-[[MODULE02]].swiftmodule"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftdoc",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-02-[[SWIFTDOC02]].swiftdoc"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftsourceinfo",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-02-[[MODULE02]].swiftsourceinfo"
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: }
|
||||
// CHECK: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "began",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "command": "{{.*}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}} -frontend -c {{.*}}file-01.swift{{(\\")?}} {{.*}}file-02.swift{{(\\")?}} -primary-file {{.*}}file-03.swift{{(\\")?}} {{.*}}main.swift{{(\\")?}} -emit-module-path {{.*}}file-03-[[MODULE03:[a-z0-9]+]].swiftmodule{{(\\")?}} -emit-module-doc-path {{.*}}file-03-[[SWIFTDOC03:[a-z0-9]+]].swiftdoc{{(\\")?}} {{.*}} {{.*}} -module-name main -o {{.*}}file-03-[[OBJ03:[a-z0-9]+]].o{{(\\")?}}",
|
||||
// CHECK-NEXT: "command_executable": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?}}",
|
||||
// CHECK-NEXT: "command_arguments": [
|
||||
// CHECK-NEXT: "-frontend",
|
||||
// CHECK-NEXT: "-c",
|
||||
// CHECK-NEXT: "{{.*}}/file-01.swift",
|
||||
// CHECK-NEXT: "{{.*}}/file-02.swift",
|
||||
// CHECK-NEXT: "-primary-file",
|
||||
// CHECK-NEXT: "{{.*}}/file-03.swift",
|
||||
// CHECK-NEXT: "{{.*}}/main.swift",
|
||||
// CHECK-NEXT: "-emit-module-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03-[[MODULE03:[a-z0-9]+]].swiftmodule",
|
||||
// CHECK-NEXT: "-emit-module-doc-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03-[[SWIFTDOC03:[a-z0-9]+]].swiftdoc",
|
||||
// CHECK: "-module-name",
|
||||
// CHECK-NEXT: "main",
|
||||
// CHECK-NEXT: "-o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03-[[OBJ03:[a-z0-9]+]].o"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "inputs": [
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03.swift"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "outputs": [
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "object",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-03-[[OBJ03]].o"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftmodule",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-03-[[MODULE03]].swiftmodule"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftdoc",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-03-[[SWIFTDOC03]].swiftdoc"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftsourceinfo",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}file-03-[[MODULE03]].swiftsourceinfo"
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: }
|
||||
// CHECK: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "began",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "command": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}} -frontend -c {{.*[\\/]}}file-01.swift{{(\\")?}} {{.*[\\/]}}file-02.swift{{(\\")?}} {{.*[\\/]}}file-03.swift{{(\\")?}} -primary-file {{.*[\\/]}}main.swift{{(\\")?}} -emit-module-path {{.*[\\/]}}main-[[MODULEMAIN:[a-z0-9]+]].swiftmodule{{(\\")?}} -emit-module-doc-path {{.*[\\/]}}main-[[SWIFTDOCMAIN:[a-z0-9]+]].swiftdoc{{(\\")?}} {{.*}} {{.*}} -module-name main -o {{.*[\\/]}}main-[[OBJMAIN:[a-z0-9]+]].o{{(\\")?}}",
|
||||
// CHECK-NEXT: "command_executable": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?}}",
|
||||
// CHECK-NEXT: "command_arguments": [
|
||||
// CHECK-NEXT: "-frontend",
|
||||
// CHECK-NEXT: "-c",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02.swift",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03.swift",
|
||||
// CHECK-NEXT: "-primary-file",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main.swift",
|
||||
// CHECK-NEXT: "-emit-module-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main-[[MODULEMAIN:[a-z0-9]+]].swiftmodule",
|
||||
// CHECK-NEXT: "-emit-module-doc-path",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main-[[SWIFTDOCMAIN:[a-z0-9]+]].swiftdoc",
|
||||
// CHECK: "-module-name",
|
||||
// CHECK-NEXT: "main",
|
||||
// CHECK-NEXT: "-o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main-[[OBJMAIN:[a-z0-9]+]].o"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "inputs": [
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main.swift"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "outputs": [
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "object",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}main-[[OBJMAIN]].o"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftmodule",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}main-[[MODULEMAIN]].swiftmodule"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftdoc",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}main-[[SWIFTDOCMAIN]].swiftdoc"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftsourceinfo",
|
||||
// CHECK-NEXT: "path": "{{.*[\\/]}}main-[[MODULEMAIN]].swiftsourceinfo"
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "finished",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "output": "Output placeholder\n",
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "exit-status": 0
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "finished",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "output": "Output placeholder\n",
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "exit-status": 0
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "finished",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "output": "Output placeholder\n",
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "exit-status": 0
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "finished",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "output": "Output placeholder\n",
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "exit-status": 0
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "began",
|
||||
// CHECK-NEXT: "name": "merge-module",
|
||||
// CHECK-NEXT: "command": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?(\\")?}} -frontend -merge-modules -emit-module {{.*[\\/]}}file-01-[[MODULE01]].swiftmodule{{(\\")?}} {{.*[\\/]}}file-02-[[MODULE02]].swiftmodule{{(\\")?}} {{.*[\\/]}}file-03-[[MODULE03]].swiftmodule{{(\\")?}} {{.*[\\/]}}main-[[MODULEMAIN]].swiftmodule{{(\\")?}} {{.*}} -emit-module-doc-path main.swiftdoc -emit-module-source-info-path main.swiftsourceinfo -module-name main -o main.swiftmodule",
|
||||
// CHECK-NEXT: "command_executable": "{{.*[\\/]}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?}}",
|
||||
// CHECK-NEXT: "command_arguments": [
|
||||
// CHECK-NEXT: "-frontend",
|
||||
// CHECK-NEXT: "-merge-modules",
|
||||
// CHECK-NEXT: "-emit-module",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01-[[MODULE01]].swiftmodule",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02-[[MODULE02]].swiftmodule",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03-[[MODULE03]].swiftmodule",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main-[[MODULEMAIN]].swiftmodule",
|
||||
// CHECK: "-emit-module-doc-path",
|
||||
// CHECK-NEXT: "main.swiftdoc",
|
||||
// CHECK: "-emit-module-source-info-path",
|
||||
// CHECK-NEXT: "main.swiftsourceinfo",
|
||||
// CHECK-NEXT: "-module-name",
|
||||
// CHECK-NEXT: "main",
|
||||
// CHECK-NEXT: "-o",
|
||||
// CHECK-NEXT: "main.swiftmodule"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "inputs": [
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-01-[[OBJ01]].o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-02-[[OBJ02]].o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}file-03-[[OBJ03]].o",
|
||||
// CHECK-NEXT: "{{.*[\\/]}}main-[[OBJMAIN]].o"
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "outputs": [
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftmodule",
|
||||
// CHECK-NEXT: "path": "main.swiftmodule"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftdoc",
|
||||
// CHECK-NEXT: "path": "main.swiftdoc"
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "type": "swiftsourceinfo",
|
||||
// CHECK-NEXT: "path": "main.swiftsourceinfo"
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: ],
|
||||
// CHECK-NEXT: "pid": {{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: }
|
||||
// CHECK-NEXT: {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: {
|
||||
// CHECK-NEXT: "kind": "finished",
|
||||
// CHECK-NEXT: "name": "merge-module",
|
||||
// CHECK-NEXT: "pid": {{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "output": "Output placeholder\n",
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "exit-status": 0
|
||||
// CHECK-NEXT: }
|
||||
@@ -1,24 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift
|
||||
// RUN: echo 'public func main() { help_an_error_happened() }' >%t/main.swift
|
||||
//
|
||||
// RUN: not %swiftc_driver -enable-batch-mode -parseable-output -serialize-diagnostics -c -emit-module -module-name main -j 1 %t/file-01.swift %t/main.swift 2>&1 | %FileCheck %s -check-prefix CHECK -check-prefix CHECK-%target-os
|
||||
//
|
||||
// CHECK: "kind": "signalled",
|
||||
// CHECK-NEXT: "name": "compile",
|
||||
// CHECK-NEXT: "pid": -{{[1-9][0-9]*}},
|
||||
// CHECK-NEXT: "process": {
|
||||
// CHECK-NEXT: "real_pid": {{[1-9][0-9]*}}
|
||||
//
|
||||
// This information is not available on POSIX systems where the child is
|
||||
// signalled, but it is available on Windows. We simply report it there since
|
||||
// we already have the information.
|
||||
// CHECK-windows-msvc-NEXT: "usage": {
|
||||
// CHECK-windows-msvc-NEXT: "utime":
|
||||
// CHECK-windows-msvc-NEXT: "stime":
|
||||
// CHECK-windows-msvc-NEXT: "maxrss":
|
||||
// CHECK-windows-msvc-NEXT: }
|
||||
//
|
||||
// CHECK-NEXT: },
|
||||
// CHECK-NEXT: "error-message": "{{.*}}",
|
||||
// CHECK-NEXT: "signal": 2
|
||||
@@ -1,19 +0,0 @@
|
||||
// Ensure that the -### and -driver-print-jobs options work properly in batch
// mode. They should each do the same thing, so test them both.
//
// Test to be sure that the output reflects the batching, in other words,
// multiple primary files. Also test to be sure that the output is on
// stdout, and NOT stderr.
|
||||
|
||||
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift
|
||||
// RUN: echo 'public func main() {}' >%t/main.swift
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-batch-mode -c -emit-module -module-name main -j 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/main.swift -driver-print-jobs 2>%t/stderr1 | %FileCheck %s -check-prefix=CHECK-COMBINED
|
||||
// RUN: %swiftc_driver -enable-batch-mode -c -emit-module -module-name main -j 2 %t/file-01.swift %t/file-02.swift %t/file-03.swift %t/main.swift -### 2>%t/stderr2 | %FileCheck %s -check-prefix=CHECK-COMBINED
|
||||
// RUN: %FileCheck %s -allow-empty -check-prefix=NEGATIVE-CHECK-COMBINED <%t/stderr1
|
||||
// RUN: %FileCheck %s -allow-empty -check-prefix=NEGATIVE-CHECK-COMBINED <%t/stderr2
|
||||
//
|
||||
// CHECK-COMBINED: -primary-file {{.*(/|\\)}}file-01.swift{{"?}} -primary-file {{.*(/|\\)}}file-02.swift{{"?}} {{.*(/|\\)}}file-03.swift{{"?}} {{.*(/|\\)}}main.swift{{"?}}
|
||||
// NEGATIVE-CHECK-COMBINED-NOT: -primary-file {{.*(/|\\)}}file-01.swift{{"?}} -primary-file {{.*(/|\\)}}file-02.swift{{"?}} {{.*(/|\\)}}file-03.swift{{"?}} {{.*(/|\\)}}main.swift{{"?}}
|
||||
@@ -1,8 +0,0 @@
// Ensure that the driver does not issue a 'using batch mode' remark, regardless of batch-mode flags. (The remark has been removed.)
//
// RUN: %swiftc_driver -whole-module-optimization -enable-batch-mode %S/../Inputs/empty.swift -### 2>&1 >/dev/null | %FileCheck %s
// RUN: %swiftc_driver %S/../Inputs/empty.swift -### 2>&1 >/dev/null | %FileCheck -allow-empty %s
// RUN: %swiftc_driver -enable-batch-mode %S/../Inputs/empty.swift -### 2>&1 >/dev/null | %FileCheck -allow-empty %s
// RUN: %swiftc_driver -enable-batch-mode -disable-batch-mode %S/../Inputs/empty.swift -### 2>&1 >/dev/null | %FileCheck -allow-empty %s
//
// CHECK-NOT: remark: using batch mode
@@ -1,10 +0,0 @@
|
||||
// Batch jobs go through a different code path than other jobs, so make sure
|
||||
// that they also use response files correctly when their argument lists are
|
||||
// too long.
|
||||
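The batch-mode RUN lines below drive exactly that situation: with half a million
-D flags the combined argument list is far too long, so the driver is expected to
spill the frontend arguments into an @arguments-*.resp response file. A rough
Swift sketch of the general spill-to-file idea (an assumption for illustration,
not the driver's actual implementation):

    import Foundation

    // If the flattened argument list would exceed `limit` bytes, write one
    // argument per line to a response file and pass "@<path>" instead.
    func maybeUseResponseFile(_ args: [String], limit: Int = 4096) throws -> [String] {
        let length = args.reduce(0) { $0 + $1.count + 1 }
        guard length > limit else { return args }
        let url = FileManager.default.temporaryDirectory
            .appendingPathComponent("arguments-\(UUID().uuidString).resp")
        try args.joined(separator: "\n").write(to: url, atomically: true, encoding: .utf8)
        return ["@" + url.path]
    }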
|
||||
// RUN: %{python} -c 'for i in range(500001): print("-DTEST_" + str(i))' > %t.resp
|
||||
// RUN: %swiftc_driver -driver-print-jobs -module-name batch -enable-batch-mode -j 1 -c %S/Inputs/main.swift %S/Inputs/lib.swift @%t.resp 2>&1 > %t.jobs.txt
|
||||
// RUN: %FileCheck %s < %t.jobs.txt -check-prefix=BATCH
|
||||
|
||||
// BATCH: bin{{/|\\\\}}swift{{c?}}
|
||||
// BATCH: @{{[^ ]*}}arguments-{{[0-9a-zA-Z]+}}.resp{{"?}} # -frontend -c -primary-file {{[^ ]+}}/Inputs/main.swift{{"?}} -primary-file {{[^ ]+}}/Inputs/lib.swift
|
||||
@@ -1,38 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
//
|
||||
// RUN: %swiftc_driver -whole-module-optimization -enable-batch-mode %S/../Inputs/empty.swift -### 2>%t/stderr_WMO_batch | %FileCheck %s
|
||||
// RUN: %swiftc_driver -enable-batch-mode -whole-module-optimization %S/../Inputs/empty.swift -### 2>%t/stderr_batch_WMO | %FileCheck %s
|
||||
// CHECK-NOT: -primary-file
|
||||
// RUN: %FileCheck -check-prefix CHECK-WMO %s <%t/stderr_WMO_batch
|
||||
// RUN: %FileCheck -check-prefix CHECK-WMO %s <%t/stderr_batch_WMO
|
||||
// CHECK-WMO: warning: ignoring '-enable-batch-mode' because '-whole-module-optimization' was also specified
|
||||
//
|
||||
// RUN: %swiftc_driver -index-file -enable-batch-mode %S/../Inputs/empty.swift -### 2>%t/stderr_index_batch | %FileCheck %s
|
||||
// RUN: %swiftc_driver -enable-batch-mode -index-file %S/../Inputs/empty.swift -### 2>%t/stderr_batch_index | %FileCheck %s
|
||||
// RUN: %FileCheck -check-prefix CHECK-INDEX %s <%t/stderr_index_batch
|
||||
// RUN: %FileCheck -check-prefix CHECK-INDEX %s <%t/stderr_batch_index
|
||||
// CHECK-INDEX: warning: ignoring '-enable-batch-mode' because '-index-file' was also specified
|
||||
//
|
||||
// The following test verifies that -disable-batch-mode overrides an earlier
// -enable-batch-mode and silences the warning about mixing batch mode with
// -index-file. Tools that take an existing command line and add -index-file can
// thus add -disable-batch-mode without having to otherwise interpret the
// arguments.
|
||||
//
|
||||
// RUN: %swiftc_driver -disable-batch-mode -index-file %S/../Inputs/empty.swift -### 2>%t/stderr_nobatch_index | %FileCheck %s
|
||||
// RUN: %swiftc_driver -enable-batch-mode -index-file %S/../Inputs/empty.swift -disable-batch-mode -### 2>%t/stderr_batch_nobatch_index | %FileCheck %s
|
||||
// RUN: %FileCheck -allow-empty -check-prefix CHECK-INDEX-DISABLED %s <%t/stderr_nobatch_index
|
||||
// RUN: %FileCheck -allow-empty -check-prefix CHECK-INDEX-DISABLED %s <%t/stderr_batch_nobatch_index
|
||||
// CHECK-INDEX-DISABLED-NOT: warning
|
||||
//
|
||||
// This next one is a regression test for a specific failure in the past: wmo +
|
||||
// batch mode should not just result in wmo, but also preserve the num-threads
|
||||
// argument and (crucially) the resulting fact that the single wmo subprocess
|
||||
// generates multiple output files. The build system that invokes swiftc expects
|
||||
// multiple outputs.
|
||||
//
|
||||
// RUN: touch %t/a.swift %t/b.swift %t/c.swift
|
||||
// RUN: %swiftc_driver %t/a.swift %t/b.swift %t/c.swift -num-threads 4 -whole-module-optimization -enable-batch-mode -### >%t/stdout_mt_wmo 2>%t/stderr_mt_wmo
|
||||
// RUN: %FileCheck --check-prefix CHECK-WMO %s <%t/stderr_mt_wmo
|
||||
// RUN: %FileCheck --check-prefix CHECK-MULTITHREADED-WMO-ARGS %s <%t/stdout_mt_wmo
|
||||
// CHECK-MULTITHREADED-WMO-ARGS: -num-threads 4 {{.*}}-o {{.*(/|\\)}}a-{{[a-z0-9]+}}.o{{"?}} -o {{.*(/|\\)}}b-{{[a-z0-9]+}}.o{{"?}} -o {{.*(/|\\)}}c-{{[a-z0-9]+}}.o{{"?}}
|
||||
@@ -1,13 +0,0 @@
// RUN: %empty-directory(%t)
// RUN: echo 'print("Hello, World!")' >%t/main.swift
// RUN: touch %t/bridgingHeader.h
//
// Make sure the proper warning is emitted:
//
// RUN: %swiftc_driver -enable-batch-mode -num-threads 2 %t/main.swift -import-objc-header %t/bridgingHeader.h -### 2>&1 | %FileCheck %s
//
// CHECK: ignoring -num-threads argument; cannot multithread batch mode
//
// Make sure that it actually works. (The link step fails if -num-threads is not ignored.)
//
// RUN: %swiftc_driver -enable-batch-mode -num-threads 2 -c %t/main.swift -import-objc-header %t/bridgingHeader.h -o %t/main.o
@@ -1,5 +0,0 @@
// RUN: %empty-directory(%t)
// RUN: echo 'print("Hello, World!")' >%t/main.swift
// RUN: touch %t/bridgingHeader.h
//
// RUN: %swiftc_driver -driver-filelist-threshold=0 -enable-batch-mode -num-threads 2 -c %t/main.swift -import-objc-header %t/bridgingHeader.h -o %t/main.o
@@ -1,9 +0,0 @@
// RUN: %empty-directory(%t)
// RUN: echo 'print("Hello, World!")' >%t/main.swift
// RUN: touch %t/file-01.swift %t/file-02.swift %t/file-03.swift
//
// Ensure that the supplementary output filelist argument is passed to the frontend:
//
// RUN: %swiftc_driver -enable-batch-mode -driver-filelist-threshold=0 -j2 %t/main.swift %t/file-01.swift %t/file-02.swift %t/file-03.swift -o %t/file-01.o -o %t/file-02.o -o %t/file-03.o -### | %FileCheck %s -check-prefix=CHECK-SUPPLEMENTARY-OUTPUT-FILELIST
//
// CHECK-SUPPLEMENTARY-OUTPUT-FILELIST: -supplementary-output-file-map {{.*(/|\\)}}supplementaryOutputs-
@@ -1,12 +0,0 @@
|
||||
// REQUIRES: executable_test
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: echo 'print("Hello, World!")' >%t/main.swift
|
||||
// RUN: touch %t/𝔼-file-01.swift %t/😂-file-02.swift %t/Ω-file-03.swift
|
||||
//
|
||||
// Ensure that the supplementary output filelist argument is passed to the frontend.
|
||||
// Also use some characters outside the BMP.
|
||||
//
|
||||
// RUN: %target-build-swift -emit-dependencies -serialize-diagnostics -driver-filelist-threshold=0 -j2 %t/main.swift %t/𝔼-file-01.swift %t/😂-file-02.swift %t/Ω-file-03.swift -o %t/a.out
|
||||
// RUN: %target-codesign %t/a.out
|
||||
// RUN: %target-run %t/a.out | %FileCheck %s -check-prefix=CHECK-HELLO-WORLD
|
||||
// CHECK-HELLO-WORLD: Hello, World!
|
||||
@@ -1,27 +0,0 @@
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: echo 'print("Hello, World!")' >%t/main.swift
|
||||
// RUN: echo "" > %t/𝔼-file-01.swift
|
||||
// RUN: echo "" > %t/😂-file-02.swift
|
||||
// RUN: echo "" > %t/Ω-file-03.swift
|
||||
//
|
||||
// If the supplementary output file map does not escape the characters in the
// source file names, the frontend won't recognize the desired outputs.
|
||||
//
|
||||
// RUN: cd %t && %target-build-swift -c -emit-dependencies -serialize-diagnostics -driver-filelist-threshold=0 -j2 main.swift @%S/Inputs/unicode-filenames.rsp -module-name mod
|
||||
//
|
||||
// All these files should exist and should successfully be deleted
|
||||
// RUN: rm main.d
|
||||
// RUN: rm main.dia
|
||||
// RUN: rm main.o
|
||||
// RUN: rm Ω-file-03.d
|
||||
// RUN: rm Ω-file-03.dia
|
||||
// RUN: rm Ω-file-03.o
|
||||
// RUN: rm Ω-file-03.swift
|
||||
// RUN: rm 𝔼-file-01.d
|
||||
// RUN: rm 𝔼-file-01.dia
|
||||
// RUN: rm 𝔼-file-01.o
|
||||
// RUN: rm 𝔼-file-01.swift
|
||||
// RUN: rm 😂-file-02.d
|
||||
// RUN: rm 😂-file-02.dia
|
||||
// RUN: rm 😂-file-02.o
|
||||
// RUN: rm 😂-file-02.swift
|
||||
@@ -47,11 +47,6 @@
|
||||
// RUN: %target-build-swift -typecheck -driver-print-jobs -import-objc-header %S/Inputs/bridging-header.h -pch-output-dir %t/pch -whole-module-optimization -disable-bridging-pch %s 2>&1 | %FileCheck %s -check-prefix=PERSISTENT-DISABLED-YESPCHJOB
|
||||
// PERSISTENT-DISABLED-YESPCHJOB-NOT: -pch-output-dir
|
||||
|
||||
// RUN: %target-build-swift -typecheck -driver-print-jobs -import-objc-header %S/Inputs/bridging-header.h -pch-output-dir %t/pch %s 2>&1 | %FileCheck %s -check-prefix=PERSISTENT-YESPCHJOB
|
||||
// RUN: %target-build-swift -typecheck -driver-print-jobs -import-objc-header %S/Inputs/bridging-header.h -pch-output-dir %t/pch %s %S/Inputs/error.swift -driver-batch-count 2 -enable-batch-mode 2>&1 | %FileCheck %s -check-prefix=PERSISTENT-YESPCHJOB
|
||||
// PERSISTENT-YESPCHJOB: {{.*}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} -frontend {{.*}} -emit-pch -pch-output-dir {{.*}}/pch
|
||||
// PERSISTENT-YESPCHJOB: {{.*}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} -frontend {{.*}} -import-objc-header {{.*}}bridging-header.h{{"?}} -pch-output-dir {{.*}}/pch{{"?}} -pch-disable-validation
|
||||
|
||||
// RUN: %target-build-swift -typecheck -driver-print-jobs -import-objc-header %S/Inputs/bridging-header.h -pch-output-dir %t/pch -serialize-diagnostics %s 2>&1 | %FileCheck %s -check-prefix=PERSISTENT-YESPCHJOB-DIAG1
|
||||
// PERSISTENT-YESPCHJOB-DIAG1: {{.*}}swift{{(c|c-legacy-driver|-frontend)?(\.exe)?"?}} -frontend {{.*}} -serialize-diagnostics-path {{.*}}bridging-header-{{.*}}.dia{{"?}} {{.*}} -emit-pch -pch-output-dir {{.*}}/pch
|
||||
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
// RUN: not %target-swiftc_driver -color-diagnostics -diagnostic-style=llvm -emit-executable -o %t %s 2>&1 \
|
||||
// RUN: | %FileCheck -check-prefix=CHECK-CD %s
|
||||
// CHECK-CD: [0m1 = 2{{$}}
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: not %target-swiftc_driver -color-diagnostics -diagnostic-style=llvm -emit-executable -o %t/out %s
|
||||
|
||||
|
||||
// RUN: not %target-swiftc_driver -no-color-diagnostics -emit-executable -o %t %s 2>&1 \
|
||||
// RUN: | %FileCheck -check-prefix=CHECK-NCD %s
|
||||
// CHECK-NCD: {{[ ]}}1 = 2
|
||||
|
||||
1 = 2
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
// RUN: not %target-build-swift %S/Inputs/error.swift %s 2>&1 | %FileCheck %s
|
||||
// RUN: not %target-build-swift -continue-building-after-errors %S/Inputs/error.swift %s 2>&1 | %FileCheck -check-prefix=CHECK-CONTINUE %s
|
||||
|
||||
// Check that batch mode implies -continue-building-after-errors.
|
||||
// RUN: touch %t.empty.swift
|
||||
// RUN: not %target-build-swift -enable-batch-mode -j2 %S/Inputs/error.swift %S/../Inputs/empty.swift %s %t.empty.swift 2>&1 | %FileCheck -check-prefix=CHECK-BATCH %s
|
||||
|
||||
// CHECK: self.bar = self.bar
|
||||
// CHECK-NOT: self.baz = self.baz
|
||||
// CHECK-CONTINUE: self.bar = self.bar
|
||||
|
||||
@@ -4,7 +4,3 @@
|
||||
// RUN: %swiftc_driver -driver-force-response-files -typecheck %S/../Inputs/empty.swift -### 2>&1 | %FileCheck %s
|
||||
// CHECK: @
|
||||
// CHECK: .resp
|
||||
|
||||
// RUN: %swiftc_driver -enable-batch-mode -driver-force-response-files -typecheck %S/../Inputs/empty.swift -### 2>&1 | %FileCheck %s -check-prefix=BATCH
|
||||
// BATCH: @
|
||||
// BATCH: .resp
|
||||
|
||||
@@ -4,7 +4,3 @@
|
||||
// WMO-NOT: -primary-file
|
||||
// RUN: %swiftc_driver -whole-module-optimization -no-whole-module-optimization %S/../Inputs/empty.swift -### 2>&1 | %FileCheck -check-prefix NO-WMO %s
|
||||
// NO-WMO: -primary-file
|
||||
|
||||
// RUN: %swiftc_driver -enable-batch-mode -whole-module-optimization -no-whole-module-optimization %S/../Inputs/empty.swift -### 2>&1 | %FileCheck -check-prefix BATCH %s
|
||||
// BATCH: -primary-file
|
||||
// BATCH-NOT: warning: ignoring '-enable-batch-mode' because '-whole-module-optimization' was also specified
|
||||
|
||||
@@ -1,76 +0,0 @@
|
||||
// When -enable-only-one-dependency-file is in effect (which is the default), the one compile job with the dependency output is unbatchable.
|
||||
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/f_1_1.swift %t/f_1_2.swift %t/f_1_3.swift %t/f_1_4.swift %t/f_1_5.swift %t/f_1_6.swift %t/f_1_7.swift %t/f_1_8.swift %t/f_1_9.swift %t/f_1_10.swift
|
||||
// RUN: touch %t/f_2_1.swift %t/f_2_2.swift %t/f_2_3.swift %t/f_2_4.swift %t/f_2_5.swift %t/f_2_6.swift %t/f_2_7.swift %t/f_2_8.swift %t/f_2_9.swift %t/f_2_10.swift
|
||||
// RUN: touch %t/f_3_1.swift %t/f_3_2.swift %t/f_3_3.swift %t/f_3_4.swift %t/f_3_5.swift %t/f_3_6.swift %t/f_3_7.swift %t/f_3_8.swift %t/f_3_9.swift %t/f_3_10.swift
|
||||
// RUN: touch %t/f_4_1.swift %t/f_4_2.swift %t/f_4_3.swift %t/f_4_4.swift %t/f_4_5.swift %t/f_4_6.swift %t/f_4_7.swift %t/f_4_8.swift %t/f_4_9.swift %t/f_4_10.swift
|
||||
// RUN: touch %t/f_5_1.swift %t/f_5_2.swift %t/f_5_3.swift %t/f_5_4.swift %t/f_5_5.swift %t/f_5_6.swift %t/f_5_7.swift %t/f_5_8.swift %t/f_5_9.swift %t/f_5_10.swift
|
||||
// RUN: touch %t/f_6_1.swift %t/f_6_2.swift %t/f_6_3.swift %t/f_6_4.swift %t/f_6_5.swift %t/f_6_6.swift %t/f_6_7.swift %t/f_6_8.swift %t/f_6_9.swift %t/f_6_10.swift
|
||||
// RUN: touch %t/f_7_1.swift %t/f_7_2.swift %t/f_7_3.swift %t/f_7_4.swift %t/f_7_5.swift %t/f_7_6.swift %t/f_7_7.swift %t/f_7_8.swift %t/f_7_9.swift %t/f_7_10.swift
|
||||
// RUN: touch %t/f_8_1.swift %t/f_8_2.swift %t/f_8_3.swift %t/f_8_4.swift %t/f_8_5.swift %t/f_8_6.swift %t/f_8_7.swift %t/f_8_8.swift %t/f_8_9.swift %t/f_8_10.swift
|
||||
// RUN: touch %t/f_9_1.swift %t/f_9_2.swift %t/f_9_3.swift %t/f_9_4.swift %t/f_9_5.swift %t/f_9_6.swift %t/f_9_7.swift %t/f_9_8.swift %t/f_9_9.swift %t/f_9_10.swift
|
||||
// RUN: touch %t/f_10_1.swift %t/f_10_2.swift %t/f_10_3.swift %t/f_10_4.swift %t/f_10_5.swift %t/f_10_6.swift %t/f_10_7.swift %t/f_10_8.swift %t/f_10_9.swift %t/f_10_10.swift
|
||||
// RUN: %swiftc_driver -disable-only-one-dependency-file -driver-show-job-lifecycle -v -c -module-name foo -emit-module -serialize-diagnostics -emit-dependencies -j 1 -enable-batch-mode %t/f_*.swift >%t/out.txt 2>&1
|
||||
|
||||
// RUN: %FileCheck %s -check-prefix=CHECK-DIS <%t/out.txt
|
||||
// CHECK-DIS-NOT: unable to execute command
|
||||
// CHECK-DIS: Forming into 4 batches
|
||||
// CHECK-DIS-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-DIS-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-DIS-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-DIS-DAG: Forming batch job from 25 constituents
|
||||
|
||||
|
||||
// RUN: %swiftc_driver -disable-only-one-dependency-file -driver-show-job-lifecycle -driver-batch-size-limit 10 -v -c -module-name foo -emit-module -serialize-diagnostics -emit-dependencies -j 1 -enable-batch-mode %t/f_*.swift >%t/out2.txt 2>&1
|
||||
// RUN: %FileCheck %s <%t/out2.txt -check-prefix=EXPLICIT-ARG-DIS
|
||||
// EXPLICIT-ARG-DIS-NOT: unable to execute command
|
||||
// EXPLICIT-ARG-DIS: Forming into 10 batches
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-DIS-DAG: Forming batch job from 10 constituents
|
||||
|
||||
|
||||
// When -enable-only-one-dependency-file is in effect (which is the default), the one compile job with the dependency output is unbatchable.
|
||||
|
||||
// RUN: %empty-directory(%t)
|
||||
// RUN: touch %t/f_1_1.swift %t/f_1_2.swift %t/f_1_3.swift %t/f_1_4.swift %t/f_1_5.swift %t/f_1_6.swift %t/f_1_7.swift %t/f_1_8.swift %t/f_1_9.swift %t/f_1_10.swift
|
||||
// RUN: touch %t/f_2_1.swift %t/f_2_2.swift %t/f_2_3.swift %t/f_2_4.swift %t/f_2_5.swift %t/f_2_6.swift %t/f_2_7.swift %t/f_2_8.swift %t/f_2_9.swift %t/f_2_10.swift
|
||||
// RUN: touch %t/f_3_1.swift %t/f_3_2.swift %t/f_3_3.swift %t/f_3_4.swift %t/f_3_5.swift %t/f_3_6.swift %t/f_3_7.swift %t/f_3_8.swift %t/f_3_9.swift %t/f_3_10.swift
|
||||
// RUN: touch %t/f_4_1.swift %t/f_4_2.swift %t/f_4_3.swift %t/f_4_4.swift %t/f_4_5.swift %t/f_4_6.swift %t/f_4_7.swift %t/f_4_8.swift %t/f_4_9.swift %t/f_4_10.swift
|
||||
// RUN: touch %t/f_5_1.swift %t/f_5_2.swift %t/f_5_3.swift %t/f_5_4.swift %t/f_5_5.swift %t/f_5_6.swift %t/f_5_7.swift %t/f_5_8.swift %t/f_5_9.swift %t/f_5_10.swift
|
||||
// RUN: touch %t/f_6_1.swift %t/f_6_2.swift %t/f_6_3.swift %t/f_6_4.swift %t/f_6_5.swift %t/f_6_6.swift %t/f_6_7.swift %t/f_6_8.swift %t/f_6_9.swift %t/f_6_10.swift
|
||||
// RUN: touch %t/f_7_1.swift %t/f_7_2.swift %t/f_7_3.swift %t/f_7_4.swift %t/f_7_5.swift %t/f_7_6.swift %t/f_7_7.swift %t/f_7_8.swift %t/f_7_9.swift %t/f_7_10.swift
|
||||
// RUN: touch %t/f_8_1.swift %t/f_8_2.swift %t/f_8_3.swift %t/f_8_4.swift %t/f_8_5.swift %t/f_8_6.swift %t/f_8_7.swift %t/f_8_8.swift %t/f_8_9.swift %t/f_8_10.swift
|
||||
// RUN: touch %t/f_9_1.swift %t/f_9_2.swift %t/f_9_3.swift %t/f_9_4.swift %t/f_9_5.swift %t/f_9_6.swift %t/f_9_7.swift %t/f_9_8.swift %t/f_9_9.swift %t/f_9_10.swift
|
||||
// RUN: touch %t/f_10_1.swift %t/f_10_2.swift %t/f_10_3.swift %t/f_10_4.swift %t/f_10_5.swift %t/f_10_6.swift %t/f_10_7.swift %t/f_10_8.swift %t/f_10_9.swift %t/f_10_10.swift
|
||||
// RUN: %swiftc_driver -enable-only-one-dependency-file -driver-show-job-lifecycle -v -c -module-name foo -emit-module -serialize-diagnostics -emit-dependencies -j 1 -enable-batch-mode %t/f_*.swift >%t/out.txt 2>&1
|
||||
// RUN: %FileCheck %s -check-prefix=CHECK-ENA <%t/out.txt
|
||||
// CHECK-ENA-NOT: unable to execute command
|
||||
// CHECK-ENA: Forming into 4 batches
|
||||
// CHECK-ENA-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-ENA-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-ENA-DAG: Forming batch job from 25 constituents
|
||||
// CHECK-ENA-DAG: Forming batch job from 24 constituents
|
||||
//
|
||||
// RUN: %swiftc_driver -enable-only-one-dependency-file -driver-show-job-lifecycle -driver-batch-size-limit 10 -v -c -module-name foo -emit-module -serialize-diagnostics -emit-dependencies -j 1 -enable-batch-mode %t/f_*.swift >%t/out2.txt 2>&1
|
||||
// RUN: %FileCheck %s <%t/out2.txt -check-prefix=EXPLICIT-ARG-ENA
|
||||
// EXPLICIT-ARG-ENA-NOT: unable to execute command
|
||||
// EXPLICIT-ARG-ENA: Forming into 10 batches
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 10 constituents
|
||||
// EXPLICIT-ARG-ENA-DAG: Forming batch job from 9 constituents