From bc3eee24ddc8287e143240de0027c6f025559cb9 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 19 Mar 2019 16:11:46 +0100 Subject: [PATCH 01/22] servo model Xm430w350 added --- src/dynamixel/servos/ax12.hpp | 6 +- src/dynamixel/servos/ax12w.hpp | 6 +- src/dynamixel/servos/ax18.hpp | 6 +- src/dynamixel/servos/base_servo.hpp | 7 +- src/dynamixel/servos/ex106.hpp | 6 +- src/dynamixel/servos/mx106.hpp | 6 +- src/dynamixel/servos/mx106_p2.hpp | 6 +- src/dynamixel/servos/mx12.hpp | 6 +- src/dynamixel/servos/mx28.hpp | 6 +- src/dynamixel/servos/mx28_p2.hpp | 6 +- src/dynamixel/servos/mx64.hpp | 6 +- src/dynamixel/servos/mx64_p2.hpp | 6 +- src/dynamixel/servos/pro_h42_20_s300.hpp | 6 +- src/dynamixel/servos/pro_h54_100_s500.hpp | 6 +- src/dynamixel/servos/pro_h54_200_s500.hpp | 6 +- src/dynamixel/servos/pro_l42_10_s300.hpp | 2 +- src/dynamixel/servos/pro_l54_30_s400.hpp | 6 +- src/dynamixel/servos/pro_l54_30_s500.hpp | 6 +- src/dynamixel/servos/pro_l54_50_s290.hpp | 6 +- src/dynamixel/servos/pro_l54_50_s500.hpp | 6 +- src/dynamixel/servos/pro_m42_10_s260.hpp | 6 +- src/dynamixel/servos/pro_m54_40_s250.hpp | 6 +- src/dynamixel/servos/pro_m54_60_s250.hpp | 6 +- src/dynamixel/servos/servo.hpp | 58 ++++++++++++-- src/dynamixel/servos/xm430_w350.hpp | 92 ++++++++++++++--------- 25 files changed, 177 insertions(+), 108 deletions(-) diff --git a/src/dynamixel/servos/ax12.hpp b/src/dynamixel/servos/ax12.hpp index 36f77c07..91a9efb0 100644 --- a/src/dynamixel/servos/ax12.hpp +++ b/src/dynamixel/servos/ax12.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -116,7 +116,7 @@ namespace dynamixel { READ_WRITE_FIELD(lock); READ_WRITE_FIELD(punch); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/ax12w.hpp b/src/dynamixel/servos/ax12w.hpp index a2711c65..3664d27c 100644 --- a/src/dynamixel/servos/ax12w.hpp +++ 
b/src/dynamixel/servos/ax12w.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -114,7 +114,7 @@ namespace dynamixel { READ_WRITE_FIELD(lock); READ_WRITE_FIELD(punch); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/ax18.hpp b/src/dynamixel/servos/ax18.hpp index 3ca5c069..e6c2ffd5 100644 --- a/src/dynamixel/servos/ax18.hpp +++ b/src/dynamixel/servos/ax18.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -114,7 +114,7 @@ namespace dynamixel { READ_WRITE_FIELD(lock); READ_WRITE_FIELD(punch); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/base_servo.hpp b/src/dynamixel/servos/base_servo.hpp index 2fc469b5..c53e60df 100644 --- a/src/dynamixel/servos/base_servo.hpp +++ b/src/dynamixel/servos/base_servo.hpp @@ -1,8 +1,8 @@ #ifndef DYNAMIXEL_SERVOS_BASE_SERVO_HPP_ #define DYNAMIXEL_SERVOS_BASE_SERVO_HPP_ -#include "../instruction_packet.hpp" #include "../errors/error.hpp" +#include "../instruction_packet.hpp" #define BASE_FIELD(Name) \ virtual InstructionPacket get_##Name() const \ @@ -193,6 +193,11 @@ namespace dynamixel { throw errors::Error("parse_present_position_angle not implemented in model"); } + // virtual InstructionPacket get_current_positions_all(std::vector ids) const + // { + // throw errors::Error("get_current_positions_all not implemented in model"); + // } + // ================================================================= // Speed-specific diff --git a/src/dynamixel/servos/ex106.hpp b/src/dynamixel/servos/ex106.hpp index 0023b904..7aebbb66 100644 --- a/src/dynamixel/servos/ex106.hpp +++ b/src/dynamixel/servos/ex106.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace 
dynamixel { namespace servos { @@ -120,7 +120,7 @@ namespace dynamixel { READ_WRITE_FIELD(punch); READ_FIELD(present_current); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx106.hpp b/src/dynamixel/servos/mx106.hpp index 68340783..d734e922 100644 --- a/src/dynamixel/servos/mx106.hpp +++ b/src/dynamixel/servos/mx106.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_WRITE_FIELD(goal_torque); READ_WRITE_FIELD(goal_acceleration); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx106_p2.hpp b/src/dynamixel/servos/mx106_p2.hpp index df52eb53..1b572005 100644 --- a/src/dynamixel/servos/mx106_p2.hpp +++ b/src/dynamixel/servos/mx106_p2.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -174,7 +174,7 @@ namespace dynamixel { READ_FIELD(speed_trajectory); READ_FIELD(position_trajectory); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx12.hpp b/src/dynamixel/servos/mx12.hpp index 88491ecc..a99e5c49 100644 --- a/src/dynamixel/servos/mx12.hpp +++ b/src/dynamixel/servos/mx12.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -120,7 +120,7 @@ namespace dynamixel { READ_WRITE_FIELD(punch); READ_WRITE_FIELD(goal_acceleration); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx28.hpp b/src/dynamixel/servos/mx28.hpp index 26dffb48..33ab90d7 100644 --- a/src/dynamixel/servos/mx28.hpp +++ b/src/dynamixel/servos/mx28.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include 
"servo.hpp" namespace dynamixel { namespace servos { @@ -120,7 +120,7 @@ namespace dynamixel { READ_WRITE_FIELD(punch); READ_WRITE_FIELD(goal_acceleration); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx28_p2.hpp b/src/dynamixel/servos/mx28_p2.hpp index 5b63e477..841cfdf4 100644 --- a/src/dynamixel/servos/mx28_p2.hpp +++ b/src/dynamixel/servos/mx28_p2.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -174,7 +174,7 @@ namespace dynamixel { READ_FIELD(speed_trajectory); READ_FIELD(position_trajectory); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx64.hpp b/src/dynamixel/servos/mx64.hpp index 344278dd..99e8abf5 100644 --- a/src/dynamixel/servos/mx64.hpp +++ b/src/dynamixel/servos/mx64.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol1.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -129,7 +129,7 @@ namespace dynamixel { READ_WRITE_FIELD(goal_torque); READ_WRITE_FIELD(goal_acceleration); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/mx64_p2.hpp b/src/dynamixel/servos/mx64_p2.hpp index 559939c3..30788f84 100644 --- a/src/dynamixel/servos/mx64_p2.hpp +++ b/src/dynamixel/servos/mx64_p2.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -174,7 +174,7 @@ namespace dynamixel { READ_FIELD(speed_trajectory); READ_FIELD(position_trajectory); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_h42_20_s300.hpp b/src/dynamixel/servos/pro_h42_20_s300.hpp index d341c67e..7eec5601 100644 --- a/src/dynamixel/servos/pro_h42_20_s300.hpp +++ b/src/dynamixel/servos/pro_h42_20_s300.hpp @@ -3,8 +3,8 @@ #include 
-#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_h54_100_s500.hpp b/src/dynamixel/servos/pro_h54_100_s500.hpp index 2168903d..cb1b7262 100644 --- a/src/dynamixel/servos/pro_h54_100_s500.hpp +++ b/src/dynamixel/servos/pro_h54_100_s500.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_h54_200_s500.hpp b/src/dynamixel/servos/pro_h54_200_s500.hpp index 907bb4bb..47c98ca9 100644 --- a/src/dynamixel/servos/pro_h54_200_s500.hpp +++ b/src/dynamixel/servos/pro_h54_200_s500.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_l42_10_s300.hpp b/src/dynamixel/servos/pro_l42_10_s300.hpp index bac68074..fd13ef5f 100644 --- a/src/dynamixel/servos/pro_l42_10_s300.hpp +++ b/src/dynamixel/servos/pro_l42_10_s300.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { diff --git a/src/dynamixel/servos/pro_l54_30_s400.hpp b/src/dynamixel/servos/pro_l54_30_s400.hpp index 57a1db9a..d710d090 100644 --- a/src/dynamixel/servos/pro_l54_30_s400.hpp +++ b/src/dynamixel/servos/pro_l54_30_s400.hpp @@ -3,8 +3,8 
@@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_l54_30_s500.hpp b/src/dynamixel/servos/pro_l54_30_s500.hpp index f7e03be7..8b9ff071 100644 --- a/src/dynamixel/servos/pro_l54_30_s500.hpp +++ b/src/dynamixel/servos/pro_l54_30_s500.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_l54_50_s290.hpp b/src/dynamixel/servos/pro_l54_50_s290.hpp index 7c0fca58..c6c4f64a 100644 --- a/src/dynamixel/servos/pro_l54_50_s290.hpp +++ b/src/dynamixel/servos/pro_l54_50_s290.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_l54_50_s500.hpp b/src/dynamixel/servos/pro_l54_50_s500.hpp index c81b91cb..3892ca9b 100644 --- a/src/dynamixel/servos/pro_l54_50_s500.hpp +++ b/src/dynamixel/servos/pro_l54_50_s500.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_m42_10_s260.hpp 
b/src/dynamixel/servos/pro_m42_10_s260.hpp index 3d79f76c..fdb83107 100644 --- a/src/dynamixel/servos/pro_m42_10_s260.hpp +++ b/src/dynamixel/servos/pro_m42_10_s260.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_m54_40_s250.hpp b/src/dynamixel/servos/pro_m54_40_s250.hpp index dd973d59..139bb5dc 100644 --- a/src/dynamixel/servos/pro_m54_40_s250.hpp +++ b/src/dynamixel/servos/pro_m54_40_s250.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/pro_m54_60_s250.hpp b/src/dynamixel/servos/pro_m54_60_s250.hpp index 6bf9c88a..ab7b17c7 100644 --- a/src/dynamixel/servos/pro_m54_60_s250.hpp +++ b/src/dynamixel/servos/pro_m54_60_s250.hpp @@ -3,8 +3,8 @@ #include -#include "servo.hpp" #include "../protocols/protocol2.hpp" +#include "servo.hpp" namespace dynamixel { namespace servos { @@ -132,7 +132,7 @@ namespace dynamixel { READ_FIELD(present_current); READ_FIELD(hardware_error_status); }; - } -} + } // namespace servos +} // namespace dynamixel #endif diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 0b0aac16..e5a1cf08 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -206,13 +206,31 @@ namespace dynamixel { return Model::parse_present_position_angle(this->_id, st); } + // static InstructionPacket::protocol_t> get_current_positions_all(typename Servo::protocol_t::id_t id, const std::vector ids) + // 
{ + // typename Servo::ct_t::present_position_t pos; + // std::vector address; + // std::vector data_length; + // for (size_t i = 0; i < ids.size(); i++) { + // address.push_back(pos); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) + // data_length.push_back(sizeof(pos)); + // } + // return bulk_read_t(_get_typed(ids), address, data_length); + // } + // + // InstructionPacket::protocol_t> get_current_positions_all(const std::vector ids) const override + // { + // return Model::get_current_positions_all(this->_id, ids); + // } + // Sync operations. Only works if the models are known and they are all the same + // use case : std::make_shared(0)->set_goal_positions(ids, angles)); replace MODEL_SERVO by Mx28 or Xl320... template static InstructionPacket set_goal_positions(const std::vector& ids, const std::vector& pos) { std::vector final_pos; for (size_t j = 0; j < pos.size(); j++) - final_pos.push_back(((pos[j] * 57.2958 - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); + final_pos.push_back((((pos[j] * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); if (ids.size() != final_pos.size()) throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); std::vector> packed(final_pos.size()); @@ -224,7 +242,7 @@ namespace dynamixel { // Bulk operations. Only works for MX models with protocol 1. 
Only works if the models are known and they are all the same template - static InstructionPacket get_current_positions(const std::vector& ids) + static InstructionPacket get_current_positions_MX(const std::vector& ids) { std::vector address; std::vector data_length; @@ -232,7 +250,33 @@ namespace dynamixel { address.push_back(0x24); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) data_length.push_back(0x02); } - return bulk_read_t(address, _get_typed(ids), data_length); + return bulk_read_t(_get_typed(ids), address, data_length); + } + + // Bulk operations. Only works for XL models with protocol 2. Only works if the models are known and they are all the same + template + static InstructionPacket get_current_positions_XL(const std::vector& ids) + { + std::vector address; + std::vector data_length; + for (size_t i = 0; i < ids.size(); i++) { + address.push_back(0x25); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) + data_length.push_back(0x02); + } + return bulk_read_t(_get_typed(ids), address, data_length); + } + + // Bulk operations. Only works for XM models with protocol 2. 
Only works if the models are known and they are all the same + template + static InstructionPacket get_current_positions_XM(const std::vector& ids) + { + std::vector address; + std::vector data_length; + for (size_t i = 0; i < ids.size(); i++) { + address.push_back(0x84); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) + data_length.push_back(0x04); + } + return bulk_read_t(_get_typed(ids), address, data_length); } // ================================================================= @@ -331,14 +375,14 @@ namespace dynamixel { static InstructionPacket get_current_speed(const std::vector& ids) { //std::vector address; - std::vector address; //uint8_t + std::vector address; //uint8_t - std::vector data_length; //uint8_t + std::vector data_length; //uint8_t for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x26); // 0x27 for XL and 0x26 for MX + address.push_back(0x27); // 0x27 for XL and 0x26 for MX data_length.push_back(0x02); } - return bulk_read_t(address, _get_typed(ids), data_length); + return bulk_read_t(_get_typed(ids), address, data_length); } // ================================================================= diff --git a/src/dynamixel/servos/xm430_w350.hpp b/src/dynamixel/servos/xm430_w350.hpp index 6cb25cd8..1cdff2a6 100644 --- a/src/dynamixel/servos/xm430_w350.hpp +++ b/src/dynamixel/servos/xm430_w350.hpp @@ -1,6 +1,11 @@ #ifndef DYNAMIXEL_SERVOS_XM430W350_HPP_ #define DYNAMIXEL_SERVOS_XM430W350_HPP_ +/* TODO : check why realtime_tick, profile_acceleration, profile_velocity didnt work. 
+READ_WRITE_FIELD(profile_acceleration); +READ_WRITE_FIELD(profile_velocity); +*/ + #include #include "../protocols/protocol2.hpp" @@ -18,8 +23,8 @@ namespace dynamixel { static const protocol_t::address_t model_number = 0; typedef uint16_t model_number_t; static const model_number_t model_number_value = 1020; - static const protocol_t::address_t model_information = 2; - typedef uint32_t model_information_t; + static const protocol_t::address_t model_info = 2; + typedef uint32_t model_info_t; static const protocol_t::address_t firmware_version = 6; typedef uint8_t firmware_version_t; static const protocol_t::address_t id = 7; @@ -40,12 +45,12 @@ namespace dynamixel { typedef uint32_t homing_offset_t; static const protocol_t::address_t moving_threshold = 24; typedef uint32_t moving_threshold_t; - static const protocol_t::address_t temperature_limit = 31; - typedef uint8_t temperature_limit_t; - static const protocol_t::address_t max_voltage_limit = 32; - typedef uint16_t max_voltage_limit_t; - static const protocol_t::address_t min_voltage_limit = 34; - typedef uint16_t min_voltage_limit_t; + static const protocol_t::address_t highest_temperature_limit = 31; // temperature_limit + typedef uint8_t highest_temperature_limit_t; + static const protocol_t::address_t highest_voltage_limit = 32; //max_voltage_limit + typedef uint16_t highest_voltage_limit_t; + static const protocol_t::address_t lowest_voltage_limit = 34; //min_voltage_limit + typedef uint16_t lowest_voltage_limit_t; static const protocol_t::address_t pwm_limit = 36; typedef uint16_t pwm_limit_t; static const protocol_t::address_t current_limit = 38; @@ -56,17 +61,17 @@ namespace dynamixel { typedef uint32_t max_position_limit_t; static const protocol_t::address_t min_position_limit = 52; typedef uint32_t min_position_limit_t; - static const protocol_t::address_t shutdown = 63; - typedef uint8_t shutdown_t; - static const shutdown_t shutdown_value = 52; + static const protocol_t::address_t alarm_shutdown = 
63; //shutdown + typedef uint8_t alarm_shutdown_t; + static const alarm_shutdown_t alarm_shutdown_value = 52; static const protocol_t::address_t torque_enable = 64; typedef uint8_t torque_enable_t; static const protocol_t::address_t led = 65; typedef uint8_t led_t; static const protocol_t::address_t status_return_level = 68; typedef uint8_t status_return_level_t; - static const protocol_t::address_t registered_instruction = 69; - typedef uint8_t registered_instruction_t; + static const protocol_t::address_t registered = 69; // registered_instruction + typedef uint8_t registered_t; static const protocol_t::address_t hardware_error_status = 70; typedef uint8_t hardware_error_status_t; static const protocol_t::address_t velocity_i_gain = 76; @@ -87,14 +92,22 @@ namespace dynamixel { typedef uint8_t bus_watchdog_t; static const protocol_t::address_t goal_pwm = 100; typedef uint16_t goal_pwm_t; - static const protocol_t::address_t goal_velocity = 104; - typedef uint32_t goal_velocity_t; + static const protocol_t::address_t moving_speed = 104; // goal_velocity + typedef uint32_t moving_speed_t; + static const moving_speed_t min_goal_speed = 0; + static const moving_speed_t max_goal_speed = 1023; + static constexpr double rpm_per_tick = 0.111; + static const bool speed_sign_bit = true; static const protocol_t::address_t profile_acceleration = 108; typedef uint32_t profil_acceleration_t; static const protocol_t::address_t profile_velocity = 112; typedef uint32_t profil_velocity_t; static const protocol_t::address_t goal_position = 116; typedef uint32_t goal_position_t; + static const goal_position_t min_goal_position = 0; + static const goal_position_t max_goal_position = 4095; + static const uint16_t min_goal_angle_deg = 30; + static const uint16_t max_goal_angle_deg = 330; static const protocol_t::address_t realtime_tick = 120; typedef uint16_t realtime_tick_t; static const protocol_t::address_t moving = 122; @@ -105,16 +118,16 @@ namespace dynamixel { typedef uint16_t 
present_pwm_t; static const protocol_t::address_t present_load = 126; typedef uint16_t present_load_t; - static const protocol_t::address_t present_velocity = 128; - typedef uint32_t present_veclocity_t; + static const protocol_t::address_t present_speed = 128; // present_velocity + typedef uint32_t present_speed_t; static const protocol_t::address_t present_position = 132; typedef uint32_t present_position_t; static const protocol_t::address_t velocity_trajectory = 136; typedef uint32_t velocity_trajectory_t; static const protocol_t::address_t position_trajectory = 140; typedef uint32_t position_trajectory_t; - static const protocol_t::address_t present_input_voltage = 144; - typedef uint16_t present_input_voltage_t; + static const protocol_t::address_t present_voltage = 144; // present_input_voltage + typedef uint16_t present_voltage_t; static const protocol_t::address_t present_temperature = 146; typedef uint16_t present_temperature_t; }; @@ -132,21 +145,16 @@ namespace dynamixel { READ_WRITE_FIELD(drive_mode); READ_WRITE_FIELD(operating_mode); READ_WRITE_FIELD(secondary_id); - READ_WRITE_FIELD(protocol_version); READ_WRITE_FIELD(homing_offset); READ_WRITE_FIELD(moving_threshold); - READ_WRITE_FIELD(temperature_limit); - READ_WRITE_FIELD(max_voltage_limit); - READ_WRITE_FIELD(min_voltage_limit); + READ_WRITE_FIELD(pwm_limit); - READ_WRITE_FIELD(velocity_limit); + //READ_WRITE_FIELD(velocity_limit); READ_WRITE_FIELD(max_position_limit); READ_WRITE_FIELD(min_position_limit); - READ_WRITE_FIELD(shutdown); + READ_WRITE_FIELD(velocity_i_gain); READ_WRITE_FIELD(velocity_p_gain); - READ_WRITE_FIELD(position_d_gain); - READ_WRITE_FIELD(position_i_gain); READ_WRITE_FIELD(protocol_version); READ_WRITE_FIELD(led); READ_WRITE_FIELD(position_d_gain); @@ -156,21 +164,33 @@ namespace dynamixel { READ_WRITE_FIELD(feedforward_1st_gain); READ_WRITE_FIELD(bus_watchdog); READ_WRITE_FIELD(goal_pwm); - READ_WRITE_FIELD(goal_velocity); - READ_WRITE_FIELD(profile_acceleration); - 
READ_WRITE_FIELD(profile_velocity); - READ_FIELD(model_information); - READ_FIELD(registered_instruction); + + READ_FIELD(model_info); + READ_FIELD(hardware_error_status); - READ_FIELD(realtime_tick); + READ_FIELD(moving_status); READ_FIELD(present_pwm); READ_FIELD(present_load); - READ_FIELD(present_velocity); - READ_FIELD(velocity_trajectory); + + //READ_FIELD(velocity_trajectory); READ_FIELD(position_trajectory); - READ_FIELD(present_input_voltage); + + READ_FIELD(model_number); + READ_FIELD(firmware_version); + READ_WRITE_FIELD(id); + READ_WRITE_FIELD(baudrate); + READ_WRITE_FIELD(return_delay_time); + READ_WRITE_FIELD(status_return_level); + + READ_WRITE_FIELD(torque_enable); + READ_WRITE_FIELD(goal_position); + READ_FIELD(present_position); READ_FIELD(present_temperature); + READ_FIELD(moving); + READ_FIELD(realtime_tick); + //READ_WRITE_FIELD(profile_acceleration); + // READ_WRITE_FIELD(profile_velocity); }; } // namespace servos } // namespace dynamixel From 16f182967f69dc88d1c2316994e60d85914e29ae Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 19 Mar 2019 16:12:59 +0100 Subject: [PATCH 02/22] some function with bulk_read and sync_write added (XM,MX,XL models) --- src/dynamixel/servos.hpp | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/src/dynamixel/servos.hpp b/src/dynamixel/servos.hpp index 9dd01ae1..fbb3eaca 100644 --- a/src/dynamixel/servos.hpp +++ b/src/dynamixel/servos.hpp @@ -5,24 +5,25 @@ #include "servos/ax12w.hpp" #include "servos/ax18.hpp" #include "servos/ex106.hpp" +#include "servos/mx106.hpp" +#include "servos/mx106_p2.hpp" #include "servos/mx12.hpp" #include "servos/mx28.hpp" #include "servos/mx28_p2.hpp" #include "servos/mx64.hpp" #include "servos/mx64_p2.hpp" -#include "servos/mx106.hpp" -#include "servos/mx106_p2.hpp" -#include "servos/xl320.hpp" -#include "servos/pro_h54_200_s500.hpp" -#include "servos/pro_h54_100_s500.hpp" #include "servos/pro_h42_20_s300.hpp" -#include 
"servos/pro_m54_60_s250.hpp" -#include "servos/pro_m54_40_s250.hpp" -#include "servos/pro_m42_10_s260.hpp" +#include "servos/pro_h54_100_s500.hpp" +#include "servos/pro_h54_200_s500.hpp" +#include "servos/pro_l42_10_s300.hpp" #include "servos/pro_l54_30_s400.hpp" #include "servos/pro_l54_30_s500.hpp" #include "servos/pro_l54_50_s290.hpp" #include "servos/pro_l54_50_s500.hpp" -#include "servos/pro_l42_10_s300.hpp" +#include "servos/pro_m42_10_s260.hpp" +#include "servos/pro_m54_40_s250.hpp" +#include "servos/pro_m54_60_s250.hpp" +#include "servos/xl320.hpp" +#include "servos/xm430_w350.hpp" #endif From 024c5200c7b3683c1174d51bbf2801e30caaba01 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 19 Mar 2019 16:14:10 +0100 Subject: [PATCH 03/22] Xm430w350 added in auto_detect --- src/dynamixel/auto_detect.hpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/dynamixel/auto_detect.hpp b/src/dynamixel/auto_detect.hpp index 536a4b5a..244932bb 100644 --- a/src/dynamixel/auto_detect.hpp +++ b/src/dynamixel/auto_detect.hpp @@ -1,8 +1,8 @@ #ifndef DYNAMIXEL_AUTO_DETECT_HPP_ #define DYNAMIXEL_AUTO_DETECT_HPP_ -#include #include +#include #include "errors/error.hpp" #include "servos.hpp" @@ -35,6 +35,8 @@ namespace dynamixel { inline std::shared_ptr> get_servo(protocols::Protocol2::id_t id, uint16_t model, protocols::Protocol2::address_t selected_protocol) { switch (model) { + case servos::Xm430W350::ct_t::model_number_value: + return std::make_shared(id); case servos::Xl320::ct_t::model_number_value: return std::make_shared(id); case servos::ProL4210S300::ct_t::model_number_value: From 29154d5f0a2f605af82f3ec34599e87dbb78104c Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 19 Mar 2019 16:15:28 +0100 Subject: [PATCH 04/22] bulk_read and sync_write fully implemented for protocol 1 and 2 --- .../waflib/Build.py | 759 ++++++++++++++++ .../waflib/ConfigSet.py | 153 ++++ .../waflib/Configure.py | 379 ++++++++ .../waflib/Context.py | 394 
+++++++++ .../waflib/Errors.py | 37 + .../waflib/Logs.py | 200 +++++ .../waflib/Node.py | 491 +++++++++++ .../waflib/Options.py | 147 ++++ .../waflib/Runner.py | 207 +++++ .../waflib/Scripting.py | 407 +++++++++ .../waflib/Task.py | 686 +++++++++++++++ .../waflib/TaskGen.py | 433 ++++++++++ .../waflib/Tools/__init__.py | 4 + .../waflib/Tools/ar.py | 13 + .../waflib/Tools/asm.py | 24 + .../waflib/Tools/bison.py | 28 + .../waflib/Tools/c.py | 26 + .../waflib/Tools/c_aliases.py | 63 ++ .../waflib/Tools/c_config.py | 758 ++++++++++++++++ .../waflib/Tools/c_osx.py | 137 +++ .../waflib/Tools/c_preproc.py | 611 +++++++++++++ .../waflib/Tools/c_tests.py | 152 ++++ .../waflib/Tools/ccroot.py | 447 ++++++++++ .../waflib/Tools/clang.py | 20 + .../waflib/Tools/clangxx.py | 20 + .../waflib/Tools/compiler_c.py | 40 + .../waflib/Tools/compiler_cxx.py | 40 + .../waflib/Tools/compiler_d.py | 37 + .../waflib/Tools/compiler_fc.py | 39 + .../waflib/Tools/cs.py | 132 +++ .../waflib/Tools/cxx.py | 26 + .../waflib/Tools/d.py | 54 ++ .../waflib/Tools/d_config.py | 52 ++ .../waflib/Tools/d_scan.py | 133 +++ .../waflib/Tools/dbus.py | 29 + .../waflib/Tools/dmd.py | 51 ++ .../waflib/Tools/errcheck.py | 163 ++++ .../waflib/Tools/fc.py | 115 +++ .../waflib/Tools/fc_config.py | 286 +++++++ .../waflib/Tools/fc_scan.py | 64 ++ .../waflib/Tools/flex.py | 32 + .../waflib/Tools/g95.py | 54 ++ .../waflib/Tools/gas.py | 12 + .../waflib/Tools/gcc.py | 102 +++ .../waflib/Tools/gdc.py | 35 + .../waflib/Tools/gfortran.py | 68 ++ .../waflib/Tools/glib2.py | 234 +++++ .../waflib/Tools/gnu_dirs.py | 66 ++ .../waflib/Tools/gxx.py | 102 +++ .../waflib/Tools/icc.py | 22 + .../waflib/Tools/icpc.py | 22 + .../waflib/Tools/ifort.py | 48 ++ .../waflib/Tools/intltool.py | 97 +++ .../waflib/Tools/irixcc.py | 45 + .../waflib/Tools/javaw.py | 305 +++++++ .../waflib/Tools/kde4.py | 48 ++ .../waflib/Tools/ldc2.py | 36 + .../waflib/Tools/lua.py | 18 + .../waflib/Tools/msvc.py | 809 ++++++++++++++++++ 
.../waflib/Tools/nasm.py | 16 + .../waflib/Tools/perl.py | 90 ++ .../waflib/Tools/python.py | 399 +++++++++ .../waflib/Tools/qt4.py | 442 ++++++++++ .../waflib/Tools/qt5.py | 489 +++++++++++ .../waflib/Tools/ruby.py | 103 +++ .../waflib/Tools/suncc.py | 46 + .../waflib/Tools/suncxx.py | 46 + .../waflib/Tools/tex.py | 317 +++++++ .../waflib/Tools/vala.py | 211 +++++ .../waflib/Tools/waf_unit_test.py | 106 +++ .../waflib/Tools/winres.py | 85 ++ .../waflib/Tools/xlc.py | 43 + .../waflib/Tools/xlcxx.py | 43 + .../waflib/Utils.py | 468 ++++++++++ .../waflib/__init__.py | 4 + .../waflib/ansiterm.py | 238 ++++++ .../waflib/extras/__init__.py | 4 + .../waflib/extras/compat15.py | 301 +++++++ .../waflib/fixpy2.py | 53 ++ src/dynamixel/controllers/usb2dynamixel.hpp | 26 +- src/dynamixel/instructions/bulk_read.hpp | 91 +- src/dynamixel/instructions/sync_write.hpp | 8 +- src/tools/utility.hpp | 11 +- 83 files changed, 13617 insertions(+), 35 deletions(-) create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py create mode 100644 
.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/__init__.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ar.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/asm.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/bison.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_aliases.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_config.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py create mode 100644 
.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/dbus.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/dmd.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/icc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/icpc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ifort.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py create mode 100644 
.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/python.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py create mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py new file mode 100644 index 00000000..a696a46c --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py @@ -0,0 +1,759 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,errno,re,shutil,stat +try: + import cPickle +except ImportError: + import pickle as cPickle +from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors +import waflib.Node +CACHE_DIR='c4che' +CACHE_SUFFIX='_cache.py' +INSTALL=1337 +UNINSTALL=-1337 +SAVED_ATTRS='root node_deps raw_deps task_sigs'.split() +CFG_FILES='cfg_files' +POST_AT_ONCE=0 +POST_LAZY=1 +POST_BOTH=2 +PROTOCOL=-1 +if sys.platform=='cli': + PROTOCOL=0 +class BuildContext(Context.Context): + '''executes the build''' + cmd='build' + variant='' + def __init__(self,**kw): + super(BuildContext,self).__init__(**kw) + self.is_install=0 + self.top_dir=kw.get('top_dir',Context.top_dir) + self.run_dir=kw.get('run_dir',Context.run_dir) + self.post_mode=POST_AT_ONCE + self.out_dir=kw.get('out_dir',Context.out_dir) + self.cache_dir=kw.get('cache_dir',None) + if not self.cache_dir: + self.cache_dir=os.path.join(self.out_dir,CACHE_DIR) + self.all_envs={} + self.task_sigs={} + self.node_deps={} + self.raw_deps={} + self.cache_dir_contents={} + self.task_gen_cache_names={} + self.launch_dir=Context.launch_dir + self.jobs=Options.options.jobs + self.targets=Options.options.targets + self.keep=Options.options.keep + self.progress_bar=Options.options.progress_bar + self.deps_man=Utils.defaultdict(list) + self.current_group=0 + self.groups=[] + self.group_names={} + def get_variant_dir(self): + if not self.variant: + return self.out_dir + return os.path.join(self.out_dir,self.variant) + variant_dir=property(get_variant_dir,None) + def __call__(self,*k,**kw): + kw['bld']=self + ret=TaskGen.task_gen(*k,**kw) + self.task_gen_cache_names={} + 
self.add_to_group(ret,group=kw.get('group',None)) + return ret + def rule(self,*k,**kw): + def f(rule): + ret=self(*k,**kw) + ret.rule=rule + return ret + return f + def __copy__(self): + raise Errors.WafError('build contexts are not supposed to be copied') + def install_files(self,*k,**kw): + pass + def install_as(self,*k,**kw): + pass + def symlink_as(self,*k,**kw): + pass + def load_envs(self): + node=self.root.find_node(self.cache_dir) + if not node: + raise Errors.WafError('The project was not configured: run "waf configure" first!') + lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) + if not lst: + raise Errors.WafError('The cache directory is empty: reconfigure the project') + for x in lst: + name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') + env=ConfigSet.ConfigSet(x.abspath()) + self.all_envs[name]=env + for f in env[CFG_FILES]: + newnode=self.root.find_resource(f) + try: + h=Utils.h_file(newnode.abspath()) + except(IOError,AttributeError): + Logs.error('cannot find %r'%f) + h=Utils.SIG_NIL + newnode.sig=h + def init_dirs(self): + if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): + raise Errors.WafError('The project was not configured: run "waf configure" first!') + self.path=self.srcnode=self.root.find_dir(self.top_dir) + self.bldnode=self.root.make_node(self.variant_dir) + self.bldnode.mkdir() + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.execute_build() + def execute_build(self): + Logs.info("Waf: Entering directory `%s'"%self.variant_dir) + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + try: + self.compile() + finally: + if self.progress_bar==1 and sys.stderr.isatty(): + c=len(self.returned_tasks)or 1 + m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL) + Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on}) + Logs.info("Waf: Leaving directory `%s'"%self.variant_dir) + self.post_build() + def 
restore(self): + try: + env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) + except EnvironmentError: + pass + else: + if env['version']').ljust(cols) + msg=Logs.indicator%(left,bar,right) + return msg + def declare_chain(self,*k,**kw): + return TaskGen.declare_chain(*k,**kw) + def pre_build(self): + for m in getattr(self,'pre_funs',[]): + m(self) + def post_build(self): + for m in getattr(self,'post_funs',[]): + m(self) + def add_pre_fun(self,meth): + try: + self.pre_funs.append(meth) + except AttributeError: + self.pre_funs=[meth] + def add_post_fun(self,meth): + try: + self.post_funs.append(meth) + except AttributeError: + self.post_funs=[meth] + def get_group(self,x): + if not self.groups: + self.add_group() + if x is None: + return self.groups[self.current_group] + if x in self.group_names: + return self.group_names[x] + return self.groups[x] + def add_to_group(self,tgen,group=None): + assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase)) + tgen.bld=self + self.get_group(group).append(tgen) + def get_group_name(self,g): + if not isinstance(g,list): + g=self.groups[g] + for x in self.group_names: + if id(self.group_names[x])==id(g): + return x + return'' + def get_group_idx(self,tg): + se=id(tg) + for i in range(len(self.groups)): + for t in self.groups[i]: + if id(t)==se: + return i + return None + def add_group(self,name=None,move=True): + if name and name in self.group_names: + Logs.error('add_group: name %s already present'%name) + g=[] + self.group_names[name]=g + self.groups.append(g) + if move: + self.current_group=len(self.groups)-1 + def set_group(self,idx): + if isinstance(idx,str): + g=self.group_names[idx] + for i in range(len(self.groups)): + if id(g)==id(self.groups[i]): + self.current_group=i + break + else: + self.current_group=idx + def total(self): + total=0 + for group in self.groups: + for tg in group: + try: + total+=len(tg.tasks) + except AttributeError: + total+=1 + return total + def 
get_targets(self): + to_post=[] + min_grp=0 + for name in self.targets.split(','): + tg=self.get_tgen_by_name(name) + m=self.get_group_idx(tg) + if m>min_grp: + min_grp=m + to_post=[tg] + elif m==min_grp: + to_post.append(tg) + return(min_grp,to_post) + def get_all_task_gen(self): + lst=[] + for g in self.groups: + lst.extend(g) + return lst + def post_group(self): + if self.targets=='*': + for tg in self.groups[self.cur]: + try: + f=tg.post + except AttributeError: + pass + else: + f() + elif self.targets: + if self.cur259 and not tgt.startswith('\\\\?\\'): + tgt='\\\\?\\'+tgt + shutil.copy2(src,tgt) + os.chmod(tgt,kw.get('chmod',Utils.O644)) + def do_install(self,src,tgt,**kw): + d,_=os.path.split(tgt) + if not d: + raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt)) + Utils.check_dir(d) + srclbl=src.replace(self.srcnode.abspath()+os.sep,'') + if not Options.options.force: + try: + st1=os.stat(tgt) + st2=os.stat(src) + except OSError: + pass + else: + if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: + if not self.progress_bar: + Logs.info('- install %s (from %s)'%(tgt,srclbl)) + return False + if not self.progress_bar: + Logs.info('+ install %s (from %s)'%(tgt,srclbl)) + try: + os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode)) + except EnvironmentError: + pass + try: + os.remove(tgt) + except OSError: + pass + try: + self.copy_fun(src,tgt,**kw) + except IOError: + try: + os.stat(src) + except EnvironmentError: + Logs.error('File %r does not exist'%src) + raise Errors.WafError('Could not install the file %r'%tgt) + def do_link(self,src,tgt,**kw): + d,_=os.path.split(tgt) + Utils.check_dir(d) + link=False + if not os.path.islink(tgt): + link=True + elif os.readlink(tgt)!=src: + link=True + if link: + try:os.remove(tgt) + except OSError:pass + if not self.progress_bar: + Logs.info('+ symlink %s (to %s)'%(tgt,src)) + os.symlink(src,tgt) + else: + if not self.progress_bar: + Logs.info('- symlink %s (to %s)'%(tgt,src)) + def 
run_task_now(self,tsk,postpone): + tsk.post() + if not postpone: + if tsk.runnable_status()==Task.ASK_LATER: + raise self.WafError('cannot post the task %r'%tsk) + tsk.run() + tsk.hasrun=True + def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True,task=None): + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.task=task + if isinstance(files,waflib.Node.Node): + tsk.source=[files] + else: + tsk.source=Utils.to_list(files) + tsk.dest=dest + tsk.exec_task=tsk.exec_install_files + tsk.relative_trick=relative_trick + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True,task=None): + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.path=cwd or self.path + tsk.chmod=chmod + tsk.source=[srcfile] + tsk.task=task + tsk.dest=dest + tsk.exec_task=tsk.exec_install_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk + def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False,task=None): + if Utils.is_win32: + return + assert(dest) + tsk=inst(env=env or self.env) + tsk.bld=self + tsk.dest=dest + tsk.path=cwd or self.path + tsk.source=[] + tsk.task=task + tsk.link=src + tsk.relative_trick=relative_trick + tsk.exec_task=tsk.exec_symlink_as + if add:self.add_to_group(tsk) + self.run_task_now(tsk,postpone) + return tsk +class UninstallContext(InstallContext): + '''removes the targets installed''' + cmd='uninstall' + def __init__(self,**kw): + super(UninstallContext,self).__init__(**kw) + self.is_install=UNINSTALL + def rm_empty_dirs(self,tgt): + while tgt: + tgt=os.path.dirname(tgt) + try: + os.rmdir(tgt) + except OSError: + break + def do_install(self,src,tgt,**kw): + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + self.uninstall.append(tgt) + try: + 
os.remove(tgt) + except OSError as e: + if e.errno!=errno.ENOENT: + if not getattr(self,'uninstall_error',None): + self.uninstall_error=True + Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)') + if Logs.verbose>1: + Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno)) + self.rm_empty_dirs(tgt) + def do_link(self,src,tgt,**kw): + try: + if not self.progress_bar: + Logs.info('- remove %s'%tgt) + os.remove(tgt) + except OSError: + pass + self.rm_empty_dirs(tgt) + def execute(self): + try: + def runnable_status(self): + return Task.SKIP_ME + setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) + setattr(Task.Task,'runnable_status',runnable_status) + super(UninstallContext,self).execute() + finally: + setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) +class CleanContext(BuildContext): + '''cleans the project''' + cmd='clean' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + try: + self.clean() + finally: + self.store() + def clean(self): + Logs.debug('build: clean called') + if self.bldnode!=self.srcnode: + lst=[] + for e in self.all_envs.values(): + lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES]) + for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): + if n in lst: + continue + n.delete() + self.root.children={} + for v in'node_deps task_sigs raw_deps'.split(): + setattr(self,v,{}) +class ListContext(BuildContext): + '''lists the targets to execute''' + cmd='list' + def execute(self): + self.restore() + if not self.all_envs: + self.load_envs() + self.recurse([self.run_dir]) + self.pre_build() + self.timer=Utils.Timer() + for g in self.groups: + for tg in g: + try: + f=tg.post + except AttributeError: + pass + else: + f() + try: + self.get_tgen_by_name('') + except Exception: + pass + lst=list(self.task_gen_cache_names.keys()) + lst.sort() + for k in lst: + 
Logs.pprint('GREEN',k) +class StepContext(BuildContext): + '''executes tasks in a step-by-step fashion, for debugging''' + cmd='step' + def __init__(self,**kw): + super(StepContext,self).__init__(**kw) + self.files=Options.options.files + def compile(self): + if not self.files: + Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') + BuildContext.compile(self) + return + targets=None + if self.targets and self.targets!='*': + targets=self.targets.split(',') + for g in self.groups: + for tg in g: + if targets and tg.name not in targets: + continue + try: + f=tg.post + except AttributeError: + pass + else: + f() + for pat in self.files.split(','): + matcher=self.get_matcher(pat) + for tg in g: + if isinstance(tg,Task.TaskBase): + lst=[tg] + else: + lst=tg.tasks + for tsk in lst: + do_exec=False + for node in getattr(tsk,'inputs',[]): + if matcher(node,output=False): + do_exec=True + break + for node in getattr(tsk,'outputs',[]): + if matcher(node,output=True): + do_exec=True + break + if do_exec: + ret=tsk.run() + Logs.info('%s -> exit %r'%(str(tsk),ret)) + def get_matcher(self,pat): + inn=True + out=True + if pat.startswith('in:'): + out=False + pat=pat.replace('in:','') + elif pat.startswith('out:'): + inn=False + pat=pat.replace('out:','') + anode=self.root.find_node(pat) + pattern=None + if not anode: + if not pat.startswith('^'): + pat='^.+?%s'%pat + if not pat.endswith('$'): + pat='%s$'%pat + pattern=re.compile(pat) + def match(node,output): + if output==True and not out: + return False + if output==False and not inn: + return False + if anode: + return anode==node + else: + return pattern.match(node.abspath()) + return match diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py new file mode 100644 index 00000000..3a86efec --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py @@ -0,0 +1,153 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Logs,Utils +re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) +class ConfigSet(object): + __slots__=('table','parent') + def __init__(self,filename=None): + self.table={} + if filename: + self.load(filename) + def __contains__(self,key): + if key in self.table:return True + try:return self.parent.__contains__(key) + except AttributeError:return False + def keys(self): + keys=set() + cur=self + while cur: + keys.update(cur.table.keys()) + cur=getattr(cur,'parent',None) + keys=list(keys) + keys.sort() + return keys + def __str__(self): + return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) + def __getitem__(self,key): + try: + while 1: + x=self.table.get(key,None) + if not x is None: + return x + self=self.parent + except AttributeError: + return[] + def __setitem__(self,key,value): + self.table[key]=value + def __delitem__(self,key): + self[key]=[] + def __getattr__(self,name): + if name in self.__slots__: + return object.__getattr__(self,name) + else: + return self[name] + def __setattr__(self,name,value): + if name in self.__slots__: + object.__setattr__(self,name,value) + else: + self[name]=value + def __delattr__(self,name): + if name in self.__slots__: + object.__delattr__(self,name) + else: + del self[name] + def derive(self): + newenv=ConfigSet() + newenv.parent=self + return newenv + def detach(self): + tbl=self.get_merged_dict() + try: + delattr(self,'parent') + except AttributeError: + pass + else: + keys=tbl.keys() + for x in keys: + tbl[x]=copy.deepcopy(tbl[x]) + self.table=tbl + return self + def get_flat(self,key): + s=self[key] + if isinstance(s,str):return s + return' '.join(s) + def _get_list_value_for_modification(self,key): + try: + value=self.table[key] + except KeyError: + try:value=self.parent[key] + except AttributeError:value=[] + if isinstance(value,list): + 
value=value[:] + else: + value=[value] + else: + if not isinstance(value,list): + value=[value] + self.table[key]=value + return value + def append_value(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + current_value.extend(val) + def prepend_value(self,var,val): + if isinstance(val,str): + val=[val] + self.table[var]=val+self._get_list_value_for_modification(var) + def append_unique(self,var,val): + if isinstance(val,str): + val=[val] + current_value=self._get_list_value_for_modification(var) + for x in val: + if x not in current_value: + current_value.append(x) + def get_merged_dict(self): + table_list=[] + env=self + while 1: + table_list.insert(0,env.table) + try:env=env.parent + except AttributeError:break + merged_table={} + for table in table_list: + merged_table.update(table) + return merged_table + def store(self,filename): + try: + os.makedirs(os.path.split(filename)[0]) + except OSError: + pass + buf=[] + merged_table=self.get_merged_dict() + keys=list(merged_table.keys()) + keys.sort() + try: + fun=ascii + except NameError: + fun=repr + for k in keys: + if k!='undo_stack': + buf.append('%s = %s\n'%(k,fun(merged_table[k]))) + Utils.writef(filename,''.join(buf)) + def load(self,filename): + tbl=self.table + code=Utils.readf(filename,m='rU') + for m in re_imp.finditer(code): + g=m.group + tbl[g(2)]=eval(g(3)) + Logs.debug('env: %s'%str(self.table)) + def update(self,d): + for k,v in d.items(): + self[k]=v + def stash(self): + orig=self.table + tbl=self.table=self.table.copy() + for x in tbl.keys(): + tbl[x]=copy.deepcopy(tbl[x]) + self.undo_stack=self.undo_stack+[orig] + def revert(self): + self.table=self.undo_stack.pop(-1) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py new file mode 100644 index 00000000..41d77206 --- /dev/null +++ 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py @@ -0,0 +1,379 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shlex,sys,time,re,shutil +from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors +BREAK='break' +CONTINUE='continue' +WAF_CONFIG_LOG='config.log' +autoconfig=False +conf_template='''# project %(app)s configured on %(now)s by +# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) +# using %(args)s +#''' +class ConfigurationContext(Context.Context): + '''configures the project''' + cmd='configure' + error_handlers=[] + def __init__(self,**kw): + super(ConfigurationContext,self).__init__(**kw) + self.environ=dict(os.environ) + self.all_envs={} + self.top_dir=None + self.out_dir=None + self.tools=[] + self.hash=0 + self.files=[] + self.tool_cache=[] + self.setenv('') + def setenv(self,name,env=None): + if name not in self.all_envs or env: + if not env: + env=ConfigSet.ConfigSet() + self.prepare_env(env) + else: + env=env.derive() + self.all_envs[name]=env + self.variant=name + def get_env(self): + return self.all_envs[self.variant] + def set_env(self,val): + self.all_envs[self.variant]=val + env=property(get_env,set_env) + def init_dirs(self): + top=self.top_dir + if not top: + top=Options.options.top + if not top: + top=getattr(Context.g_module,Context.TOP,None) + if not top: + top=self.path.abspath() + top=os.path.abspath(top) + self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) + assert(self.srcnode) + out=self.out_dir + if not out: + out=Options.options.out + if not out: + out=getattr(Context.g_module,Context.OUT,None) + if not out: + out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') + out=os.path.realpath(out) + self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) + self.bldnode.mkdir() + if not os.path.isdir(self.bldnode.abspath()): + 
conf.fatal('Could not create the build directory %s'%self.bldnode.abspath()) + def execute(self): + self.init_dirs() + self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) + self.cachedir.mkdir() + path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) + self.logger=Logs.make_logger(path,'cfg') + app=getattr(Context.g_module,'APPNAME','') + if app: + ver=getattr(Context.g_module,'VERSION','') + if ver: + app="%s (%s)"%(app,ver) + params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app} + self.to_log(conf_template%params) + self.msg('Setting top to',self.srcnode.abspath()) + self.msg('Setting out to',self.bldnode.abspath()) + if id(self.srcnode)==id(self.bldnode): + Logs.warn('Setting top == out (remember to use "update_outputs")') + elif id(self.path)!=id(self.srcnode): + if self.srcnode.is_child_of(self.path): + Logs.warn('Are you certain that you do not want to set top="." ?') + super(ConfigurationContext,self).execute() + self.store() + Context.top_dir=self.srcnode.abspath() + Context.out_dir=self.bldnode.abspath() + env=ConfigSet.ConfigSet() + env['argv']=sys.argv + env['options']=Options.options.__dict__ + env.run_dir=Context.run_dir + env.top_dir=Context.top_dir + env.out_dir=Context.out_dir + env['hash']=self.hash + env['files']=self.files + env['environ']=dict(self.environ) + if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options,'no_lock_in_run'): + env.store(os.path.join(Context.run_dir,Options.lockfile)) + if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options,'no_lock_in_top'): + env.store(os.path.join(Context.top_dir,Options.lockfile)) + if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options,'no_lock_in_out'): + env.store(os.path.join(Context.out_dir,Options.lockfile)) + def prepare_env(self,env): + if not env.PREFIX: + if Options.options.prefix or Utils.is_win32: + env.PREFIX=Utils.sane_path(Options.options.prefix) + else: + 
env.PREFIX='' + if not env.BINDIR: + if Options.options.bindir: + env.BINDIR=Utils.sane_path(Options.options.bindir) + else: + env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) + if not env.LIBDIR: + if Options.options.libdir: + env.LIBDIR=Utils.sane_path(Options.options.libdir) + else: + env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env) + def store(self): + n=self.cachedir.make_node('build.config.py') + n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) + if not self.all_envs: + self.fatal('nothing to store in the configuration context!') + for key in self.all_envs: + tmpenv=self.all_envs[key] + tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) + def load(self,input,tooldir=None,funs=None,with_sys_path=True): + tools=Utils.to_list(input) + if tooldir:tooldir=Utils.to_list(tooldir) + for tool in tools: + mag=(tool,id(self.env),tooldir,funs) + if mag in self.tool_cache: + self.to_log('(tool %s is already loaded, skipping)'%tool) + continue + self.tool_cache.append(mag) + module=None + try: + module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path) + except ImportError as e: + self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e)) + except Exception as e: + self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) + self.to_log(Utils.ex_stack()) + raise + if funs is not None: + self.eval_rules(funs) + else: + func=getattr(module,'configure',None) + if func: + if type(func)is type(Utils.readf):func(self) + else:self.eval_rules(func) + self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) + def post_recurse(self,node): + super(ConfigurationContext,self).post_recurse(node) + self.hash=Utils.h_list((self.hash,node.read('rb'))) + self.files.append(node.abspath()) + def eval_rules(self,rules): + self.rules=Utils.to_list(rules) + for x in self.rules: + f=getattr(self,x) + if not f:self.fatal("No such method '%s'."%x) + try: + f() + except Exception as e: + 
ret=self.err_handler(x,e) + if ret==BREAK: + break + elif ret==CONTINUE: + continue + else: + raise + def err_handler(self,fun,error): + pass +def conf(f): + def fun(*k,**kw): + mandatory=True + if'mandatory'in kw: + mandatory=kw['mandatory'] + del kw['mandatory'] + try: + return f(*k,**kw) + except Errors.ConfigurationError: + if mandatory: + raise + fun.__name__=f.__name__ + setattr(ConfigurationContext,f.__name__,fun) + setattr(Build.BuildContext,f.__name__,fun) + return f +@conf +def add_os_flags(self,var,dest=None,dup=True): + try: + flags=shlex.split(self.environ[var]) + except KeyError: + return + if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])): + self.env.append_value(dest or var,flags) +@conf +def cmd_to_list(self,cmd): + if isinstance(cmd,str)and cmd.find(' '): + try: + os.stat(cmd) + except OSError: + return shlex.split(cmd) + else: + return[cmd] + return cmd +@conf +def check_waf_version(self,mini='1.7.99',maxi='1.9.0',**kw): + self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw) + ver=Context.HEXVERSION + if Utils.num2ver(mini)>ver: + self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) + if Utils.num2ver(maxi) %r'%(filename,path_list,var,ret)) + if not ret: + self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename) + interpreter=kw.get('interpreter',None) + if interpreter is None: + if not Utils.check_exe(ret[0],env=environ): + self.fatal('Program %r is not executable'%ret) + self.env[var]=ret + else: + self.env[var]=self.env[interpreter]+ret + return ret +@conf +def find_binary(self,filenames,exts,paths): + for f in filenames: + for ext in exts: + exe_name=f+ext + if os.path.isabs(exe_name): + if os.path.isfile(exe_name): + return exe_name + else: + for path in paths: + x=os.path.expanduser(os.path.join(path,exe_name)) + if os.path.isfile(x): + return x + return None +@conf +def run_build(self,*k,**kw): + lst=[str(v)for(p,v)in kw.items()if p!='env'] 
+ h=Utils.h_list(lst) + dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 and'.'or'')+'conf_check_'+Utils.to_hex(h) + try: + os.makedirs(dir) + except OSError: + pass + try: + os.stat(dir) + except OSError: + self.fatal('cannot use the configuration test folder %r'%dir) + cachemode=getattr(Options.options,'confcache',None) + if cachemode==1: + try: + proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build')) + except OSError: + pass + except IOError: + pass + else: + ret=proj['cache_run_build'] + if isinstance(ret,str)and ret.startswith('Test does not build'): + self.fatal(ret) + return ret + bdir=os.path.join(dir,'testbuild') + if not os.path.exists(bdir): + os.makedirs(bdir) + self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir) + bld.init_dirs() + bld.progress_bar=0 + bld.targets='*' + bld.logger=self.logger + bld.all_envs.update(self.all_envs) + bld.env=kw['env'] + bld.kw=kw + bld.conf=self + kw['build_fun'](bld) + ret=-1 + try: + try: + bld.compile() + except Errors.WafError: + ret='Test does not build: %s'%Utils.ex_stack() + self.fatal(ret) + else: + ret=getattr(bld,'retval',0) + finally: + if cachemode==1: + proj=ConfigSet.ConfigSet() + proj['cache_run_build']=ret + proj.store(os.path.join(dir,'cache_run_build')) + else: + shutil.rmtree(dir) + return ret +@conf +def ret_msg(self,msg,args): + if isinstance(msg,str): + return msg + return msg(args) +@conf +def test(self,*k,**kw): + if not'env'in kw: + kw['env']=self.env.derive() + if kw.get('validate',None): + kw['validate'](kw) + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.run_build(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + if kw.get('post_check',None): + ret=kw['post_check'](kw) + if ret: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + self.fatal('The configuration failed %r'%ret) + else: + 
self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py new file mode 100644 index 00000000..89cead61 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py @@ -0,0 +1,394 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,imp,sys +from waflib import Utils,Errors,Logs +import waflib.Node +HEXVERSION=0x1081200 +WAFVERSION="1.8.18" +WAFREVISION="62efd566280f494cdf4d7940aac85f2b59ef660b" +ABI=98 +DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) +APPNAME='APPNAME' +VERSION='VERSION' +TOP='top' +OUT='out' +WSCRIPT_FILE='wscript' +launch_dir='' +run_dir='' +top_dir='' +out_dir='' +waf_dir='' +local_repo='' +remote_repo='https://raw.githubusercontent.com/waf-project/waf/master/' +remote_locs=['waflib/extras','waflib/Tools'] +g_module=None +STDOUT=1 +STDERR=-1 +BOTH=0 +classes=[] +def create_context(cmd_name,*k,**kw): + global classes + for x in classes: + if x.cmd==cmd_name: + return x(*k,**kw) + ctx=Context(*k,**kw) + ctx.fun=cmd_name + return ctx +class store_context(type): + def __init__(cls,name,bases,dict): + super(store_context,cls).__init__(name,bases,dict) + name=cls.__name__ + if name=='ctx'or name=='Context': + return + try: + cls.cmd + except AttributeError: + raise Errors.WafError('Missing command for the context class %r (cmd)'%name) + if not getattr(cls,'fun',None): + cls.fun=cls.cmd + global classes + classes.insert(0,cls) +ctx=store_context('ctx',(object,),{}) +class Context(ctx): + errors=Errors + tools={} + def __init__(self,**kw): + try: + rd=kw['run_dir'] + except KeyError: + global run_dir + rd=run_dir + self.node_class=type("Nod3",(waflib.Node.Node,),{}) + self.node_class.__module__="waflib.Node" + self.node_class.ctx=self + self.root=self.node_class('',None) + 
self.cur_script=None + self.path=self.root.find_dir(rd) + self.stack_path=[] + self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} + self.logger=None + def __hash__(self): + return id(self) + def finalize(self): + try: + logger=self.logger + except AttributeError: + pass + else: + Logs.free_logger(logger) + delattr(self,'logger') + def load(self,tool_list,*k,**kw): + tools=Utils.to_list(tool_list) + path=Utils.to_list(kw.get('tooldir','')) + with_sys_path=kw.get('with_sys_path',True) + for t in tools: + module=load_tool(t,path,with_sys_path=with_sys_path) + fun=getattr(module,kw.get('name',self.fun),None) + if fun: + fun(self) + def execute(self): + global g_module + self.recurse([os.path.dirname(g_module.root_path)]) + def pre_recurse(self,node): + self.stack_path.append(self.cur_script) + self.cur_script=node + self.path=node.parent + def post_recurse(self,node): + self.cur_script=self.stack_path.pop() + if self.cur_script: + self.path=self.cur_script.parent + def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None): + try: + cache=self.recurse_cache + except AttributeError: + cache=self.recurse_cache={} + for d in Utils.to_list(dirs): + if not os.path.isabs(d): + d=os.path.join(self.path.abspath(),d) + WSCRIPT=os.path.join(d,WSCRIPT_FILE) + WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) + node=self.root.find_node(WSCRIPT_FUN) + if node and(not once or node not in cache): + cache[node]=True + self.pre_recurse(node) + try: + function_code=node.read('rU',encoding) + exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) + finally: + self.post_recurse(node) + elif not node: + node=self.root.find_node(WSCRIPT) + tup=(node,name or self.fun) + if node and(not once or tup not in cache): + cache[tup]=True + self.pre_recurse(node) + try: + wscript_module=load_module(node.abspath(),encoding=encoding) + user_function=getattr(wscript_module,(name or self.fun),None) + if not user_function: + if not mandatory: + continue + raise 
Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath())) + user_function(self) + finally: + self.post_recurse(node) + elif not node: + if not mandatory: + continue + try: + os.listdir(d) + except OSError: + raise Errors.WafError('Cannot read the folder %r'%d) + raise Errors.WafError('No wscript file in directory %s'%d) + def exec_command(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%(cmd,)) + Logs.debug('runner_env: kw=%s'%kw) + if self.logger: + self.logger.info(cmd) + if'stdout'not in kw: + kw['stdout']=subprocess.PIPE + if'stderr'not in kw: + kw['stderr']=subprocess.PIPE + if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): + raise Errors.WafError("Program %s not found!"%cmd[0]) + wargs={} + if'timeout'in kw: + if kw['timeout']is not None: + wargs['timeout']=kw['timeout'] + del kw['timeout'] + if'input'in kw: + if kw['input']: + wargs['input']=kw['input'] + kw['stdin']=Utils.subprocess.PIPE + del kw['input'] + try: + if kw['stdout']or kw['stderr']: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate(**wargs) + ret=p.returncode + else: + out,err=(None,None) + ret=subprocess.Popen(cmd,**kw).wait(**wargs) + except Exception as e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if out: + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.debug('out: %s'%out) + else: + Logs.info(out,extra={'stream':sys.stdout,'c1':''}) + if err: + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if self.logger: + self.logger.error('err: %s'%err) + else: + Logs.info(err,extra={'stream':sys.stderr,'c1':''}) + return ret + def cmd_and_log(self,cmd,**kw): + subprocess=Utils.subprocess + kw['shell']=isinstance(cmd,str) + Logs.debug('runner: %r'%(cmd,)) + if'quiet'in kw: + quiet=kw['quiet'] + del kw['quiet'] + else: + quiet=None + if'output'in kw: + to_ret=kw['output'] + del 
kw['output'] + else: + to_ret=STDOUT + if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): + raise Errors.WafError("Program %s not found!"%cmd[0]) + kw['stdout']=kw['stderr']=subprocess.PIPE + if quiet is None: + self.to_log(cmd) + wargs={} + if'timeout'in kw: + if kw['timeout']is not None: + wargs['timeout']=kw['timeout'] + del kw['timeout'] + if'input'in kw: + if kw['input']: + wargs['input']=kw['input'] + kw['stdin']=Utils.subprocess.PIPE + del kw['input'] + try: + p=subprocess.Popen(cmd,**kw) + (out,err)=p.communicate(**wargs) + except Exception as e: + raise Errors.WafError('Execution failure: %s'%str(e),ex=e) + if not isinstance(out,str): + out=out.decode(sys.stdout.encoding or'iso8859-1') + if not isinstance(err,str): + err=err.decode(sys.stdout.encoding or'iso8859-1') + if out and quiet!=STDOUT and quiet!=BOTH: + self.to_log('out: %s'%out) + if err and quiet!=STDERR and quiet!=BOTH: + self.to_log('err: %s'%err) + if p.returncode: + e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) + e.returncode=p.returncode + e.stderr=err + e.stdout=out + raise e + if to_ret==BOTH: + return(out,err) + elif to_ret==STDERR: + return err + return out + def fatal(self,msg,ex=None): + if self.logger: + self.logger.info('from %s: %s'%(self.path.abspath(),msg)) + try: + msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) + except Exception: + pass + raise self.errors.ConfigurationError(msg,ex=ex) + def to_log(self,msg): + if not msg: + return + if self.logger: + self.logger.info(msg) + else: + sys.stderr.write(str(msg)) + sys.stderr.flush() + def msg(self,*k,**kw): + try: + msg=kw['msg'] + except KeyError: + msg=k[0] + self.start_msg(msg,**kw) + try: + result=kw['result'] + except KeyError: + result=k[1] + color=kw.get('color',None) + if not isinstance(color,str): + color=result and'GREEN'or'YELLOW' + self.end_msg(result,color,**kw) + def start_msg(self,*k,**kw): + if kw.get('quiet',None): + return + msg=kw.get('msg',None)or k[0] 
+ try: + if self.in_msg: + self.in_msg+=1 + return + except AttributeError: + self.in_msg=0 + self.in_msg+=1 + try: + self.line_just=max(self.line_just,len(msg)) + except AttributeError: + self.line_just=max(40,len(msg)) + for x in(self.line_just*'-',msg): + self.to_log(x) + Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') + def end_msg(self,*k,**kw): + if kw.get('quiet',None): + return + self.in_msg-=1 + if self.in_msg: + return + result=kw.get('result',None)or k[0] + defcolor='GREEN' + if result==True: + msg='ok' + elif result==False: + msg='not found' + defcolor='YELLOW' + else: + msg=str(result) + self.to_log(msg) + try: + color=kw['color'] + except KeyError: + if len(k)>1 and k[1]in Logs.colors_lst: + color=k[1] + else: + color=defcolor + Logs.pprint(color,msg) + def load_special_tools(self,var,ban=[]): + global waf_dir + if os.path.isdir(waf_dir): + lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) + for x in lst: + if not x.name in ban: + load_tool(x.name.replace('.py','')) + else: + from zipfile import PyZipFile + waflibs=PyZipFile(waf_dir) + lst=waflibs.namelist() + for x in lst: + if not re.match("waflib/extras/%s"%var.replace("*",".*"),var): + continue + f=os.path.basename(x) + doban=False + for b in ban: + r=b.replace("*",".*") + if re.match(r,f): + doban=True + if not doban: + f=f.replace('.py','') + load_tool(f) +cache_modules={} +def load_module(path,encoding=None): + try: + return cache_modules[path] + except KeyError: + pass + module=imp.new_module(WSCRIPT_FILE) + try: + code=Utils.readf(path,m='rU',encoding=encoding) + except EnvironmentError: + raise Errors.WafError('Could not read the file %r'%path) + module_dir=os.path.dirname(path) + sys.path.insert(0,module_dir) + try:exec(compile(code,path,'exec'),module.__dict__) + finally:sys.path.remove(module_dir) + cache_modules[path]=module + return module +def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True): + if tool=='java': + tool='javaw' + else: + 
tool=tool.replace('++','xx') + origSysPath=sys.path + if not with_sys_path:sys.path=[] + try: + if tooldir: + assert isinstance(tooldir,list) + sys.path=tooldir+sys.path + try: + __import__(tool) + finally: + for d in tooldir: + sys.path.remove(d) + ret=sys.modules[tool] + Context.tools[tool]=ret + return ret + else: + if not with_sys_path:sys.path.insert(0,waf_dir) + try: + for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'): + try: + __import__(x%tool) + break + except ImportError: + x=None + if x is None: + __import__(tool) + finally: + if not with_sys_path:sys.path.remove(waf_dir) + ret=sys.modules[x%tool] + Context.tools[tool]=ret + return ret + finally: + if not with_sys_path:sys.path+=origSysPath diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py new file mode 100644 index 00000000..3d98c8d2 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import traceback,sys +class WafError(Exception): + def __init__(self,msg='',ex=None): + self.msg=msg + assert not isinstance(msg,Exception) + self.stack=[] + if ex: + if not msg: + self.msg=str(ex) + if isinstance(ex,WafError): + self.stack=ex.stack + else: + self.stack=traceback.extract_tb(sys.exc_info()[2]) + self.stack+=traceback.extract_stack()[:-1] + self.verbose_msg=''.join(traceback.format_list(self.stack)) + def __str__(self): + return str(self.msg) +class BuildError(WafError): + def __init__(self,error_tasks=[]): + self.tasks=error_tasks + WafError.__init__(self,self.format_error()) + def format_error(self): + lst=['Build failed'] + for tsk in self.tasks: + txt=tsk.format_error() + if txt:lst.append(txt) + return'\n'.join(lst) +class ConfigurationError(WafError): + pass +class TaskRescan(WafError): + pass +class TaskNotReady(WafError): + pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py new file mode 100644 index 00000000..984ac1bd --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py @@ -0,0 +1,200 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,traceback,sys +from waflib import Utils,ansiterm +if not os.environ.get('NOSYNC',False): + if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__): + sys.stdout=ansiterm.AnsiTerm(sys.stdout) + if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__): + sys.stderr=ansiterm.AnsiTerm(sys.stderr) +import logging +LOG_FORMAT=os.environ.get('WAF_LOG_FORMAT','%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s') +HOUR_FORMAT=os.environ.get('WAF_HOUR_FORMAT','%H:%M:%S') +zones='' +verbose=0 +colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','GREY':'\x1b[37m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} +indicator='\r\x1b[K%s%s%s' +try: + unicode +except NameError: + unicode=None +def enable_colors(use): + if use==1: + if not(sys.stderr.isatty()or sys.stdout.isatty()): + use=0 + if Utils.is_win32 and os.name!='java': + term=os.environ.get('TERM','') + else: + term=os.environ.get('TERM','dumb') + if term in('dumb','emacs'): + use=0 + if use>=1: + os.environ['TERM']='vt100' + colors_lst['USE']=use +try: + get_term_cols=ansiterm.get_term_cols +except AttributeError: + def get_term_cols(): + return 80 +get_term_cols.__doc__=""" + Get the console width in characters. 
+ + :return: the number of characters per line + :rtype: int + """ +def get_color(cl): + if not colors_lst['USE']:return'' + return colors_lst.get(cl,'') +class color_dict(object): + def __getattr__(self,a): + return get_color(a) + def __call__(self,a): + return get_color(a) +colors=color_dict() +re_log=re.compile(r'(\w+): (.*)',re.M) +class log_filter(logging.Filter): + def __init__(self,name=None): + pass + def filter(self,rec): + rec.zone=rec.module + if rec.levelno>=logging.INFO: + return True + m=re_log.match(rec.msg) + if m: + rec.zone=m.group(1) + rec.msg=m.group(2) + if zones: + return getattr(rec,'zone','')in zones or'*'in zones + elif not verbose>2: + return False + return True +class log_handler(logging.StreamHandler): + def emit(self,record): + try: + try: + self.stream=record.stream + except AttributeError: + if record.levelno>=logging.WARNING: + record.stream=self.stream=sys.stderr + else: + record.stream=self.stream=sys.stdout + self.emit_override(record) + self.flush() + except(KeyboardInterrupt,SystemExit): + raise + except: + self.handleError(record) + def emit_override(self,record,**kw): + self.terminator=getattr(record,'terminator','\n') + stream=self.stream + if unicode: + msg=self.formatter.format(record) + fs='%s'+self.terminator + try: + if(isinstance(msg,unicode)and getattr(stream,'encoding',None)): + fs=fs.decode(stream.encoding) + try: + stream.write(fs%msg) + except UnicodeEncodeError: + stream.write((fs%msg).encode(stream.encoding)) + else: + stream.write(fs%msg) + except UnicodeError: + stream.write((fs%msg).encode("UTF-8")) + else: + logging.StreamHandler.emit(self,record) +class formatter(logging.Formatter): + def __init__(self): + logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) + def format(self,rec): + try: + msg=rec.msg.decode('utf-8') + except Exception: + msg=rec.msg + use=colors_lst['USE'] + if(use==1 and rec.stream.isatty())or use==2: + c1=getattr(rec,'c1',None) + if c1 is None: + c1='' + if rec.levelno>=logging.ERROR: 
+ c1=colors.RED + elif rec.levelno>=logging.WARNING: + c1=colors.YELLOW + elif rec.levelno>=logging.INFO: + c1=colors.GREEN + c2=getattr(rec,'c2',colors.NORMAL) + msg='%s%s%s'%(c1,msg,c2) + else: + msg=msg.replace('\r','\n') + msg=re.sub(r'\x1B\[(K|.*?(m|h|l))','',msg) + if rec.levelno>=logging.INFO: + return msg + rec.msg=msg + rec.c1=colors.PINK + rec.c2=colors.NORMAL + return logging.Formatter.format(self,rec) +log=None +def debug(*k,**kw): + if verbose: + k=list(k) + k[0]=k[0].replace('\n',' ') + global log + log.debug(*k,**kw) +def error(*k,**kw): + global log + log.error(*k,**kw) + if verbose>2: + st=traceback.extract_stack() + if st: + st=st[:-1] + buf=[] + for filename,lineno,name,line in st: + buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) + if line: + buf.append(' %s'%line.strip()) + if buf:log.error("\n".join(buf)) +def warn(*k,**kw): + global log + log.warn(*k,**kw) +def info(*k,**kw): + global log + log.info(*k,**kw) +def init_log(): + global log + log=logging.getLogger('waflib') + log.handlers=[] + log.filters=[] + hdlr=log_handler() + hdlr.setFormatter(formatter()) + log.addHandler(hdlr) + log.addFilter(log_filter()) + log.setLevel(logging.DEBUG) +def make_logger(path,name): + logger=logging.getLogger(name) + hdlr=logging.FileHandler(path,'w') + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.setLevel(logging.DEBUG) + return logger +def make_mem_logger(name,to_log,size=8192): + from logging.handlers import MemoryHandler + logger=logging.getLogger(name) + hdlr=MemoryHandler(size,target=to_log) + formatter=logging.Formatter('%(message)s') + hdlr.setFormatter(formatter) + logger.addHandler(hdlr) + logger.memhandler=hdlr + logger.setLevel(logging.DEBUG) + return logger +def free_logger(logger): + try: + for x in logger.handlers: + x.close() + logger.removeHandler(x) + except Exception: + pass +def pprint(col,msg,label='',sep='\n'): + info("%s%s%s 
%s"%(colors(col),msg,colors.NORMAL,label),extra={'terminator':sep}) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py new file mode 100644 index 00000000..f7eb365e --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py @@ -0,0 +1,491 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,sys,shutil +from waflib import Utils,Errors +exclude_regs=''' +**/*~ +**/#*# +**/.#* +**/%*% +**/._* +**/CVS +**/CVS/** +**/.cvsignore +**/SCCS +**/SCCS/** +**/vssver.scc +**/.svn +**/.svn/** +**/BitKeeper +**/.git +**/.git/** +**/.gitignore +**/.bzr +**/.bzrignore +**/.bzr/** +**/.hg +**/.hg/** +**/_MTN +**/_MTN/** +**/.arch-ids +**/{arch} +**/_darcs +**/_darcs/** +**/.intlcache +**/.DS_Store''' +split_path=Utils.split_path +split_path_unix=Utils.split_path_unix +split_path_cygwin=Utils.split_path_cygwin +split_path_win32=Utils.split_path_win32 +class Node(object): + dict_class=dict + __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig') + def __init__(self,name,parent): + self.name=name + self.parent=parent + if parent: + if name in parent.children: + raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) + parent.children[name]=self + def __setstate__(self,data): + self.name=data[0] + self.parent=data[1] + if data[2]is not None: + self.children=self.dict_class(data[2]) + if data[3]is not None: + self.sig=data[3] + def __getstate__(self): + return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None)) + def __str__(self): + return self.name + def __repr__(self): + return self.abspath() + def __hash__(self): + return id(self) + def __eq__(self,node): + return id(self)==id(node) + def __copy__(self): + raise Errors.WafError('nodes are not supposed to be copied') + def 
read(self,flags='r',encoding='ISO8859-1'): + return Utils.readf(self.abspath(),flags,encoding) + def write(self,data,flags='w',encoding='ISO8859-1'): + Utils.writef(self.abspath(),data,flags,encoding) + def read_json(self,convert=True,encoding='utf-8'): + import json + object_pairs_hook=None + if convert and sys.hexversion<0x3000000: + try: + _type=unicode + except NameError: + _type=str + def convert(value): + if isinstance(value,list): + return[convert(element)for element in value] + elif isinstance(value,_type): + return str(value) + else: + return value + def object_pairs(pairs): + return dict((str(pair[0]),convert(pair[1]))for pair in pairs) + object_pairs_hook=object_pairs + return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook) + def write_json(self,data,pretty=True): + import json + indent=2 + separators=(',',': ') + sort_keys=pretty + newline=os.linesep + if not pretty: + indent=None + separators=(',',':') + newline='' + output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline + self.write(output,encoding='utf-8') + def chmod(self,val): + os.chmod(self.abspath(),val) + def delete(self): + try: + try: + if hasattr(self,'children'): + shutil.rmtree(self.abspath()) + else: + os.remove(self.abspath()) + except OSError as e: + if os.path.exists(self.abspath()): + raise e + finally: + self.evict() + def evict(self): + del self.parent.children[self.name] + def suffix(self): + k=max(0,self.name.rfind('.')) + return self.name[k:] + def height(self): + d=self + val=-1 + while d: + d=d.parent + val+=1 + return val + def listdir(self): + lst=Utils.listdir(self.abspath()) + lst.sort() + return lst + def mkdir(self): + if getattr(self,'cache_isdir',None): + return + try: + self.parent.mkdir() + except OSError: + pass + if self.name: + try: + os.makedirs(self.abspath()) + except OSError: + pass + if not os.path.isdir(self.abspath()): + raise Errors.WafError('Could not create the directory %s'%self.abspath()) + 
try: + self.children + except AttributeError: + self.children=self.dict_class() + self.cache_isdir=True + def find_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + try: + ch=cur.children + except AttributeError: + cur.children=self.dict_class() + else: + try: + cur=ch[x] + continue + except KeyError: + pass + cur=self.__class__(x,cur) + try: + os.stat(cur.abspath()) + except OSError: + cur.evict() + return None + ret=cur + try: + os.stat(ret.abspath()) + except OSError: + ret.evict() + return None + try: + while not getattr(cur.parent,'cache_isdir',None): + cur=cur.parent + cur.cache_isdir=True + except AttributeError: + pass + return ret + def make_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + continue + if getattr(cur,'children',{}): + if x in cur.children: + cur=cur.children[x] + continue + else: + cur.children=self.dict_class() + cur=self.__class__(x,cur) + return cur + def search_node(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + cur=self + for x in lst: + if x=='..': + cur=cur.parent or cur + else: + try: + cur=cur.children[x] + except(AttributeError,KeyError): + return None + return cur + def path_from(self,node): + c1=self + c2=node + c1h=c1.height() + c2h=c2.height() + lst=[] + up=0 + while c1h>c2h: + lst.append(c1.name) + c1=c1.parent + c1h-=1 + while c2h>c1h: + up+=1 + c2=c2.parent + c2h-=1 + while id(c1)!=id(c2): + lst.append(c1.name) + up+=1 + c1=c1.parent + c2=c2.parent + if c1.parent: + for i in range(up): + lst.append('..') + else: + if lst and not Utils.is_win32: + lst.append('') + lst.reverse() + return os.sep.join(lst)or'.' 
+ def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if not self.parent: + val=os.sep + elif not self.parent.name: + val=os.sep+self.name + else: + val=self.parent.abspath()+os.sep+self.name + self.cache_abspath=val + return val + if Utils.is_win32: + def abspath(self): + try: + return self.cache_abspath + except AttributeError: + pass + if not self.parent: + val='' + elif not self.parent.name: + val=self.name+os.sep + else: + val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name + self.cache_abspath=val + return val + def is_child_of(self,node): + p=self + diff=self.height()-node.height() + while diff>0: + diff-=1 + p=p.parent + return id(p)==id(node) + def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True): + dircont=self.listdir() + dircont.sort() + try: + lst=set(self.children.keys()) + except AttributeError: + self.children=self.dict_class() + else: + if remove: + for x in lst-set(dircont): + self.children[x].evict() + for name in dircont: + npats=accept(name,pats) + if npats and npats[0]: + accepted=[]in npats[0] + node=self.make_node([name]) + isdir=os.path.isdir(node.abspath()) + if accepted: + if isdir: + if dir: + yield node + else: + if src: + yield node + if getattr(node,'cache_isdir',None)or isdir: + node.cache_isdir=True + if maxdepth: + for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove): + yield k + raise StopIteration + def ant_glob(self,*k,**kw): + src=kw.get('src',True) + dir=kw.get('dir',False) + excl=kw.get('excl',exclude_regs) + incl=k and k[0]or kw.get('incl','**') + reflags=kw.get('ignorecase',0)and re.I + def to_pat(s): + lst=Utils.to_list(s) + ret=[] + for x in lst: + x=x.replace('\\','/').replace('//','/') + if x.endswith('/'): + x+='**' + lst2=x.split('/') + accu=[] + for k in lst2: + if k=='**': + accu.append(k) + else: + k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') + k='^%s$'%k + try: + 
accu.append(re.compile(k,flags=reflags)) + except Exception as e: + raise Errors.WafError("Invalid pattern: %s"%k,e) + ret.append(accu) + return ret + def filtre(name,nn): + ret=[] + for lst in nn: + if not lst: + pass + elif lst[0]=='**': + ret.append(lst) + if len(lst)>1: + if lst[1].match(name): + ret.append(lst[2:]) + else: + ret.append([]) + elif lst[0].match(name): + ret.append(lst[1:]) + return ret + def accept(name,pats): + nacc=filtre(name,pats[0]) + nrej=filtre(name,pats[1]) + if[]in nrej: + nacc=[] + return[nacc,nrej] + ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=kw.get('maxdepth',25),dir=dir,src=src,remove=kw.get('remove',True))] + if kw.get('flat',False): + return' '.join([x.path_from(self)for x in ret]) + return ret + def is_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return False + if id(cur)==x: + return True + cur=cur.parent + return False + def is_bld(self): + cur=self + y=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==y: + return True + cur=cur.parent + return False + def get_src(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + lst.reverse() + return self.ctx.srcnode.make_node(lst) + if id(cur)==x: + return self + lst.append(cur.name) + cur=cur.parent + return self + def get_bld(self): + cur=self + x=id(self.ctx.srcnode) + y=id(self.ctx.bldnode) + lst=[] + while cur.parent: + if id(cur)==y: + return self + if id(cur)==x: + lst.reverse() + return self.ctx.bldnode.make_node(lst) + lst.append(cur.name) + cur=cur.parent + lst.reverse() + if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): + lst[0]=lst[0][0] + return self.ctx.bldnode.make_node(['__root__']+lst) + def find_resource(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if not node: + self=self.get_src() + 
node=self.find_node(lst) + if node: + if os.path.isdir(node.abspath()): + return None + return node + def find_or_declare(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.get_bld().search_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + self=self.get_src() + node=self.find_node(lst) + if node: + if not os.path.isfile(node.abspath()): + node.sig=None + node.parent.mkdir() + return node + node=self.get_bld().make_node(lst) + node.parent.mkdir() + return node + def find_dir(self,lst): + if isinstance(lst,str): + lst=[x for x in split_path(lst)if x and x!='.'] + node=self.find_node(lst) + try: + if not os.path.isdir(node.abspath()): + return None + except(OSError,AttributeError): + return None + return node + def change_ext(self,ext,ext_in=None): + name=self.name + if ext_in is None: + k=name.rfind('.') + if k>=0: + name=name[:k]+ext + else: + name=name+ext + else: + name=name[:-len(ext_in)]+ext + return self.parent.find_or_declare([name]) + def bldpath(self): + return self.path_from(self.ctx.bldnode) + def srcpath(self): + return self.path_from(self.ctx.srcnode) + def relpath(self): + cur=self + x=id(self.ctx.bldnode) + while cur.parent: + if id(cur)==x: + return self.bldpath() + cur=cur.parent + return self.srcpath() + def bld_dir(self): + return self.parent.bldpath() + def get_bld_sig(self): + try: + return self.cache_sig + except AttributeError: + pass + if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode: + self.sig=Utils.h_file(self.abspath()) + self.cache_sig=ret=self.sig + return ret +pickle_lock=Utils.threading.Lock() +class Nod3(Node): + pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py new file mode 100644 index 00000000..5101f5fc --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py @@ -0,0 +1,147 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,tempfile,optparse,sys,re +from waflib import Logs,Utils,Context +cmds='distclean configure build install clean uninstall check dist distcheck'.split() +options={} +commands=[] +envvars=[] +lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform) +platform=Utils.unversioned_sys_platform() +class opt_parser(optparse.OptionParser): + def __init__(self,ctx): + optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION)) + self.formatter.width=Logs.get_term_cols() + self.ctx=ctx + def print_usage(self,file=None): + return self.print_help(file) + def get_usage(self): + cmds_str={} + for cls in Context.classes: + if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'): + continue + s=cls.__doc__ or'' + cmds_str[cls.cmd]=s + if Context.g_module: + for(k,v)in Context.g_module.__dict__.items(): + if k in('options','init','shutdown'): + continue + if type(v)is type(Context.create_context): + if v.__doc__ and not k.startswith('_'): + cmds_str[k]=v.__doc__ + just=0 + for k in cmds_str: + just=max(just,len(k)) + lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] + lst.sort() + ret='\n'.join(lst) + return'''waf [commands] [options] + +Main commands (example: ./waf build -j4) +%s +'''%ret +class OptionsContext(Context.Context): + cmd='options' + fun='options' + def __init__(self,**kw): + super(OptionsContext,self).__init__(**kw) + self.parser=opt_parser(self) + self.option_groups={} + jobs=self.jobs() + p=self.add_option + color=os.environ.get('NOCOLOR','')and'no'or'auto' + p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto')) + p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) + 
p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)') + p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') + p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') + gr=self.add_option_group('Configuration options') + self.option_groups['configure options']=gr + gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') + gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') + gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run') + gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out') + gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top') + default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX')) + if not default_prefix: + if platform=='win32': + d=tempfile.gettempdir() + default_prefix=d[0].upper()+d[1:] + else: + default_prefix='/usr/local/' + gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) + gr.add_option('--bindir',dest='bindir',help='bindir') + gr.add_option('--libdir',dest='libdir',help='libdir') + gr=self.add_option_group('Build and installation options') + self.option_groups['build and install options']=gr + gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output') + gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') + gr=self.add_option_group('Step options') + self.option_groups['step options']=gr + gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. 
"*/main.c,*/test/main.o"') + default_destdir=os.environ.get('DESTDIR','') + gr=self.add_option_group('Installation and uninstallation options') + self.option_groups['install/uninstall options']=gr + gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') + gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') + gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store') + def jobs(self): + count=int(os.environ.get('JOBS',0)) + if count<1: + if'NUMBER_OF_PROCESSORS'in os.environ: + count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) + else: + if hasattr(os,'sysconf_names'): + if'SC_NPROCESSORS_ONLN'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_ONLN')) + elif'SC_NPROCESSORS_CONF'in os.sysconf_names: + count=int(os.sysconf('SC_NPROCESSORS_CONF')) + if not count and os.name not in('nt','java'): + try: + tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) + except Exception: + pass + else: + if re.match('^[0-9]+$',tmp): + count=int(tmp) + if count<1: + count=1 + elif count>1024: + count=1024 + return count + def add_option(self,*k,**kw): + return self.parser.add_option(*k,**kw) + def add_option_group(self,*k,**kw): + try: + gr=self.option_groups[k[0]] + except KeyError: + gr=self.parser.add_option_group(*k,**kw) + self.option_groups[k[0]]=gr + return gr + def get_option_group(self,opt_str): + try: + return self.option_groups[opt_str] + except KeyError: + for group in self.parser.option_groups: + if group.title==opt_str: + return group + return None + def parse_args(self,_args=None): + global options,commands,envvars + (options,leftover_args)=self.parser.parse_args(args=_args) + for arg in leftover_args: + if'='in arg: + envvars.append(arg) + else: + commands.append(arg) + if options.destdir: + options.destdir=Utils.sane_path(options.destdir) + if options.verbose>=1: + 
self.load('errcheck') + colors={'yes':2,'auto':1,'no':0}[options.colors] + Logs.enable_colors(colors) + def execute(self): + super(OptionsContext,self).execute() + self.parse_args() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py new file mode 100644 index 00000000..ab661a22 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py @@ -0,0 +1,207 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import random,atexit +try: + from queue import Queue +except ImportError: + from Queue import Queue +from waflib import Utils,Task,Errors,Logs +GAP=10 +class TaskConsumer(Utils.threading.Thread): + def __init__(self): + Utils.threading.Thread.__init__(self) + self.ready=Queue() + self.setDaemon(1) + self.start() + def run(self): + try: + self.loop() + except Exception: + pass + def loop(self): + while 1: + tsk=self.ready.get() + if not isinstance(tsk,Task.TaskBase): + tsk(self) + else: + tsk.process() +pool=Queue() +def get_pool(): + try: + return pool.get(False) + except Exception: + return TaskConsumer() +def put_pool(x): + pool.put(x) +def _free_resources(): + global pool + lst=[] + while pool.qsize(): + lst.append(pool.get()) + for x in lst: + x.ready.put(None) + for x in lst: + x.join() + pool=None +atexit.register(_free_resources) +class Parallel(object): + def __init__(self,bld,j=2): + self.numjobs=j + self.bld=bld + self.outstanding=[] + self.frozen=[] + self.out=Queue(0) + self.count=0 + self.processed=1 + self.stop=False + self.error=[] + self.biter=None + self.dirty=False + def get_next_task(self): + if not self.outstanding: + return None + return self.outstanding.pop(0) + def postpone(self,tsk): + if random.randint(0,1): + self.frozen.insert(0,tsk) + else: + self.frozen.append(tsk) + def refill_task_list(self): + while self.count>self.numjobs*GAP: + self.get_out() + 
while not self.outstanding: + if self.count: + self.get_out() + elif self.frozen: + try: + cond=self.deadlock==self.processed + except AttributeError: + pass + else: + if cond: + msg='check the build order for the tasks' + for tsk in self.frozen: + if not tsk.run_after: + msg='check the methods runnable_status' + break + lst=[] + for tsk in self.frozen: + lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after])) + raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst))) + self.deadlock=self.processed + if self.frozen: + self.outstanding+=self.frozen + self.frozen=[] + elif not self.count: + self.outstanding.extend(next(self.biter)) + self.total=self.bld.total() + break + def add_more_tasks(self,tsk): + if getattr(tsk,'more_tasks',None): + self.outstanding+=tsk.more_tasks + self.total+=len(tsk.more_tasks) + def get_out(self): + tsk=self.out.get() + if not self.stop: + self.add_more_tasks(tsk) + self.count-=1 + self.dirty=True + return tsk + def add_task(self,tsk): + try: + self.pool + except AttributeError: + self.init_task_pool() + self.ready.put(tsk) + def init_task_pool(self): + pool=self.pool=[get_pool()for i in range(self.numjobs)] + self.ready=Queue(0) + def setq(consumer): + consumer.ready=self.ready + for x in pool: + x.ready.put(setq) + return pool + def free_task_pool(self): + def setq(consumer): + consumer.ready=Queue(0) + self.out.put(self) + try: + pool=self.pool + except AttributeError: + pass + else: + for x in pool: + self.ready.put(setq) + for x in pool: + self.get_out() + for x in pool: + put_pool(x) + self.pool=[] + def skip(self,tsk): + tsk.hasrun=Task.SKIPPED + def error_handler(self,tsk): + if hasattr(tsk,'scan')and hasattr(tsk,'uid'): + key=(tsk.uid(),'imp') + try: + del self.bld.task_sigs[key] + except KeyError: + pass + if not self.bld.keep: + self.stop=True + self.error.append(tsk) + def task_status(self,tsk): + try: + return tsk.runnable_status() + except Exception: + self.processed+=1 + tsk.err_msg=Utils.ex_stack() + if 
not self.stop and self.bld.keep: + self.skip(tsk) + if self.bld.keep==1: + if Logs.verbose>1 or not self.error: + self.error.append(tsk) + self.stop=True + else: + if Logs.verbose>1: + self.error.append(tsk) + return Task.EXCEPTION + tsk.hasrun=Task.EXCEPTION + self.error_handler(tsk) + return Task.EXCEPTION + def start(self): + self.total=self.bld.total() + while not self.stop: + self.refill_task_list() + tsk=self.get_next_task() + if not tsk: + if self.count: + continue + else: + break + if tsk.hasrun: + self.processed+=1 + continue + if self.stop: + break + st=self.task_status(tsk) + if st==Task.RUN_ME: + tsk.position=(self.processed,self.total) + self.count+=1 + tsk.master=self + self.processed+=1 + if self.numjobs==1: + tsk.process() + else: + self.add_task(tsk) + if st==Task.ASK_LATER: + self.postpone(tsk) + elif st==Task.SKIP_ME: + self.processed+=1 + self.skip(tsk) + self.add_more_tasks(tsk) + while self.error and self.count: + self.get_out() + assert(self.count==0 or self.stop) + self.free_task_pool() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py new file mode 100644 index 00000000..dcbab750 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py @@ -0,0 +1,407 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shlex,shutil,traceback,errno,sys,stat +from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node +build_dir_override=None +no_climb_commands=['configure'] +default_cmd="build" +def waf_entry_point(current_directory,version,wafdir): + Logs.init_log() + if Context.WAFVERSION!=version: + Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir)) + sys.exit(1) + if'--version'in sys.argv: + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + if len(sys.argv)>1: + potential_wscript=os.path.join(current_directory,sys.argv[1]) + if os.path.basename(potential_wscript)=='wscript'and os.path.isfile(potential_wscript): + current_directory=os.path.normpath(os.path.dirname(potential_wscript)) + sys.argv.pop(1) + Context.waf_dir=wafdir + Context.launch_dir=current_directory + no_climb=os.environ.get('NOCLIMB',None) + if not no_climb: + for k in no_climb_commands: + for y in sys.argv: + if y.startswith(k): + no_climb=True + break + for i,x in enumerate(sys.argv): + if x.startswith('--top='): + Context.run_dir=Context.top_dir=Utils.sane_path(x[6:]) + sys.argv[i]='--top='+Context.run_dir + if x.startswith('--out='): + Context.out_dir=Utils.sane_path(x[6:]) + sys.argv[i]='--out='+Context.out_dir + cur=current_directory + while cur and not Context.top_dir: + lst=os.listdir(cur) + if Options.lockfile in lst: + env=ConfigSet.ConfigSet() + try: + env.load(os.path.join(cur,Options.lockfile)) + ino=os.stat(cur)[stat.ST_INO] + except Exception: + pass + else: + for x in(env.run_dir,env.top_dir,env.out_dir): + if Utils.is_win32: + if cur==x: + load=True + break + else: + try: + ino2=os.stat(x)[stat.ST_INO] + except OSError: + pass + else: + if ino==ino2: + load=True + break + else: + Logs.warn('invalid lock file in %s'%cur) + load=False + if load: + 
Context.run_dir=env.run_dir + Context.top_dir=env.top_dir + Context.out_dir=env.out_dir + break + if not Context.run_dir: + if Context.WSCRIPT_FILE in lst: + Context.run_dir=cur + next=os.path.dirname(cur) + if next==cur: + break + cur=next + if no_climb: + break + if not Context.run_dir: + if'-h'in sys.argv or'--help'in sys.argv: + Logs.warn('No wscript file found: the help message may be incomplete') + Context.run_dir=current_directory + ctx=Context.create_context('options') + ctx.curdir=current_directory + ctx.parse_args() + sys.exit(0) + Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE) + sys.exit(1) + try: + os.chdir(Context.run_dir) + except OSError: + Logs.error('Waf: The folder %r is unreadable'%Context.run_dir) + sys.exit(1) + try: + set_main_module(os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))) + except Errors.WafError as e: + Logs.pprint('RED',e.verbose_msg) + Logs.error(str(e)) + sys.exit(1) + except Exception as e: + Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e) + traceback.print_exc(file=sys.stdout) + sys.exit(2) + try: + run_commands() + except Errors.WafError as e: + if Logs.verbose>1: + Logs.pprint('RED',e.verbose_msg) + Logs.error(e.msg) + sys.exit(1) + except SystemExit: + raise + except Exception as e: + traceback.print_exc(file=sys.stdout) + sys.exit(2) + except KeyboardInterrupt: + Logs.pprint('RED','Interrupted') + sys.exit(68) +def set_main_module(file_path): + Context.g_module=Context.load_module(file_path) + Context.g_module.root_path=file_path + def set_def(obj): + name=obj.__name__ + if not name in Context.g_module.__dict__: + setattr(Context.g_module,name,obj) + for k in(update,dist,distclean,distcheck): + set_def(k) + if not'init'in Context.g_module.__dict__: + Context.g_module.init=Utils.nada + if not'shutdown'in Context.g_module.__dict__: + Context.g_module.shutdown=Utils.nada + if not'options'in Context.g_module.__dict__: + 
Context.g_module.options=Utils.nada +def parse_options(): + Context.create_context('options').execute() + for var in Options.envvars: + (name,value)=var.split('=',1) + os.environ[name.strip()]=value + if not Options.commands: + Options.commands=[default_cmd] + Options.commands=[x for x in Options.commands if x!='options'] + Logs.verbose=Options.options.verbose + if Options.options.zones: + Logs.zones=Options.options.zones.split(',') + if not Logs.verbose: + Logs.verbose=1 + elif Logs.verbose>0: + Logs.zones=['runner'] + if Logs.verbose>2: + Logs.zones=['*'] +def run_command(cmd_name): + ctx=Context.create_context(cmd_name) + ctx.log_timer=Utils.Timer() + ctx.options=Options.options + ctx.cmd=cmd_name + try: + ctx.execute() + finally: + ctx.finalize() + return ctx +def run_commands(): + parse_options() + run_command('init') + while Options.commands: + cmd_name=Options.commands.pop(0) + ctx=run_command(cmd_name) + Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer))) + run_command('shutdown') +def _can_distclean(name): + for k in'.o .moc .exe'.split(): + if name.endswith(k): + return True + return False +def distclean_dir(dirname): + for(root,dirs,files)in os.walk(dirname): + for f in files: + if _can_distclean(f): + fname=os.path.join(root,f) + try: + os.remove(fname) + except OSError: + Logs.warn('Could not remove %r'%fname) + for x in(Context.DBFILE,'config.log'): + try: + os.remove(x) + except OSError: + pass + try: + shutil.rmtree('c4che') + except OSError: + pass +def distclean(ctx): + '''removes the build directory''' + lst=os.listdir('.') + for f in lst: + if f==Options.lockfile: + try: + proj=ConfigSet.ConfigSet(f) + except IOError: + Logs.warn('Could not read %r'%f) + continue + if proj['out_dir']!=proj['top_dir']: + try: + shutil.rmtree(proj['out_dir']) + except IOError: + pass + except OSError as e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%proj['out_dir']) + else: + distclean_dir(proj['out_dir']) + for k 
in(proj['out_dir'],proj['top_dir'],proj['run_dir']): + p=os.path.join(k,Options.lockfile) + try: + os.remove(p) + except OSError as e: + if e.errno!=errno.ENOENT: + Logs.warn('Could not remove %r'%p) + if not Options.commands: + for x in'.waf-1. waf-1. .waf3-1. waf3-1.'.split(): + if f.startswith(x): + shutil.rmtree(f,ignore_errors=True) +class Dist(Context.Context): + '''creates an archive containing the project source code''' + cmd='dist' + fun='dist' + algo='tar.bz2' + ext_algo={} + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + def archive(self): + import tarfile + arch_name=self.get_arch_name() + try: + self.base_path + except AttributeError: + self.base_path=self.path + node=self.base_path.make_node(arch_name) + try: + node.delete() + except OSError: + pass + files=self.get_files() + if self.algo.startswith('tar.'): + tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.','')) + for x in files: + self.add_tar_file(x,tar) + tar.close() + elif self.algo=='zip': + import zipfile + zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED) + for x in files: + archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) + zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) + zip.close() + else: + self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') + try: + from hashlib import sha1 as sha + except ImportError: + from sha import sha + try: + digest=" (sha=%r)"%sha(node.read()).hexdigest() + except Exception: + digest='' + Logs.info('New archive created: %s%s'%(self.arch_name,digest)) + def get_tar_path(self,node): + return node.abspath() + def add_tar_file(self,x,tar): + p=self.get_tar_path(x) + tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) + tinfo.uid=0 + tinfo.gid=0 + tinfo.uname='root' + tinfo.gname='root' + fu=None + try: + fu=open(p,'rb') + tar.addfile(tinfo,fileobj=fu) + finally: + if fu: + fu.close() + def get_tar_prefix(self): 
+ try: + return self.tar_prefix + except AttributeError: + return self.get_base_name() + def get_arch_name(self): + try: + self.arch_name + except AttributeError: + self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) + return self.arch_name + def get_base_name(self): + try: + self.base_name + except AttributeError: + appname=getattr(Context.g_module,Context.APPNAME,'noname') + version=getattr(Context.g_module,Context.VERSION,'1.0') + self.base_name=appname+'-'+version + return self.base_name + def get_excl(self): + try: + return self.excl + except AttributeError: + self.excl=Node.exclude_regs+' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' + if Context.out_dir: + nd=self.root.find_node(Context.out_dir) + if nd: + self.excl+=' '+nd.path_from(self.base_path) + return self.excl + def get_files(self): + try: + files=self.files + except AttributeError: + files=self.base_path.ant_glob('**/*',excl=self.get_excl()) + return files +def dist(ctx): + '''makes a tarball for redistributing the sources''' + pass +class DistCheck(Dist): + fun='distcheck' + cmd='distcheck' + def execute(self): + self.recurse([os.path.dirname(Context.g_module.root_path)]) + self.archive() + self.check() + def check(self): + import tempfile,tarfile + t=None + try: + t=tarfile.open(self.get_arch_name()) + for x in t: + t.extract(x) + finally: + if t: + t.close() + cfg=[] + if Options.options.distcheck_args: + cfg=shlex.split(Options.options.distcheck_args) + else: + cfg=[x for x in sys.argv if x.startswith('-')] + instdir=tempfile.mkdtemp('.inst',self.get_base_name()) + ret=Utils.subprocess.Popen([sys.executable,sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait() + if ret: + raise Errors.WafError('distcheck failed with code %i'%ret) + if os.path.exists(instdir): + raise Errors.WafError('distcheck succeeded, but files were left in 
%s'%instdir) + shutil.rmtree(self.get_base_name()) +def distcheck(ctx): + '''checks if the project compiles (tarball from 'dist')''' + pass +def update(ctx): + lst=Options.options.files + if lst: + lst=lst.split(',') + else: + path=os.path.join(Context.waf_dir,'waflib','extras') + lst=[x for x in Utils.listdir(path)if x.endswith('.py')] + for x in lst: + tool=x.replace('.py','') + if not tool: + continue + try: + dl=Configure.download_tool + except AttributeError: + ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!') + try: + dl(tool,force=True,ctx=ctx) + except Errors.WafError: + Logs.error('Could not find the tool %r in the remote repository'%x) + else: + Logs.warn('Updated %r'%tool) +def autoconfigure(execute_method): + def execute(self): + if not Configure.autoconfig: + return execute_method(self) + env=ConfigSet.ConfigSet() + do_config=False + try: + env.load(os.path.join(Context.top_dir,Options.lockfile)) + except Exception: + Logs.warn('Configuring the project') + do_config=True + else: + if env.run_dir!=Context.run_dir: + do_config=True + else: + h=0 + for f in env['files']: + h=Utils.h_list((h,Utils.readf(f,'rb'))) + do_config=h!=env.hash + if do_config: + Options.commands.insert(0,self.cmd) + Options.commands.insert(0,'configure') + if Configure.autoconfig=='clobber': + Options.options.__dict__=env.options + return + return execute_method(self) + return execute +Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py new file mode 100644 index 00000000..89ed5f62 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py @@ -0,0 +1,686 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,sys +from waflib import Utils,Logs,Errors +NOT_RUN=0 +MISSING=1 +CRASHED=2 +EXCEPTION=3 +SKIPPED=8 +SUCCESS=9 +ASK_LATER=-1 +SKIP_ME=-2 +RUN_ME=-3 +COMPILE_TEMPLATE_SHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + cwdx = getattr(bld, 'cwdx', bld.bldnode) # TODO single cwd value in waf 1.9 + wd = getattr(tsk, 'cwd', None) + p = env.get_flat + tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s + return tsk.exec_command(cmd, cwd=wd, env=env.env or None) +''' +COMPILE_TEMPLATE_NOSHELL=''' +def f(tsk): + env = tsk.env + gen = tsk.generator + bld = gen.bld + cwdx = getattr(bld, 'cwdx', bld.bldnode) # TODO single cwd value in waf 1.9 + wd = getattr(tsk, 'cwd', None) + def to_list(xx): + if isinstance(xx, str): return [xx] + return xx + tsk.last_cmd = lst = [] + %s + lst = [x for x in lst if x] + return tsk.exec_command(lst, cwd=wd, env=env.env or None) +''' +classes={} +class store_task_type(type): + def __init__(cls,name,bases,dict): + super(store_task_type,cls).__init__(name,bases,dict) + name=cls.__name__ + if name.endswith('_task'): + name=name.replace('_task','') + if name!='evil'and name!='TaskBase': + global classes + if getattr(cls,'run_str',None): + (f,dvars)=compile_fun(cls.run_str,cls.shell) + cls.hcode=Utils.h_cmd(cls.run_str) + cls.orig_run_str=cls.run_str + cls.run_str=None + cls.run=f + cls.vars=list(set(cls.vars+dvars)) + cls.vars.sort() + elif getattr(cls,'run',None)and not'hcode'in cls.__dict__: + cls.hcode=Utils.h_cmd(cls.run) + getattr(cls,'register',classes)[name]=cls +evil=store_task_type('evil',(object,),{}) +class TaskBase(evil): + color='GREEN' + ext_in=[] + ext_out=[] + before=[] + after=[] + hcode='' + def __init__(self,*k,**kw): + self.hasrun=NOT_RUN + try: + self.generator=kw['generator'] + except KeyError: + self.generator=self + def __repr__(self): + return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun',''))) + def 
__str__(self): + if hasattr(self,'fun'): + return self.fun.__name__ + return self.__class__.__name__ + def __hash__(self): + return id(self) + def keyword(self): + if hasattr(self,'fun'): + return'Function' + return'Processing' + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return bld.exec_command(cmd,**kw) + def runnable_status(self): + return RUN_ME + def process(self): + m=self.master + if m.stop: + m.out.put(self) + return + try: + del self.generator.bld.task_sigs[self.uid()] + except KeyError: + pass + try: + self.generator.bld.returned_tasks.append(self) + self.log_display(self.generator.bld) + ret=self.run() + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + m.error_handler(self) + m.out.put(self) + return + if ret: + self.err_code=ret + self.hasrun=CRASHED + else: + try: + self.post_run() + except Errors.WafError: + pass + except Exception: + self.err_msg=Utils.ex_stack() + self.hasrun=EXCEPTION + else: + self.hasrun=SUCCESS + if self.hasrun!=SUCCESS: + m.error_handler(self) + m.out.put(self) + def run(self): + if hasattr(self,'fun'): + return self.fun(self) + return 0 + def post_run(self): + pass + def log_display(self,bld): + if self.generator.bld.progress_bar==3: + return + s=self.display() + if s: + if bld.logger: + logger=bld.logger + else: + logger=Logs + if self.generator.bld.progress_bar==1: + c1=Logs.colors.cursor_off + c2=Logs.colors.cursor_on + logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2}) + else: + logger.info(s,extra={'terminator':'','c1':'','c2':''}) + def display(self): + col1=Logs.colors(self.color) + col2=Logs.colors.NORMAL + master=self.master + def cur(): + tmp=-1 + if hasattr(master,'ready'): + tmp-=master.ready.qsize() + return master.processed+tmp + if self.generator.bld.progress_bar==1: + return 
self.generator.bld.progress_line(cur(),master.total,col1,col2) + if self.generator.bld.progress_bar==2: + ela=str(self.generator.bld.timer) + try: + ins=','.join([n.name for n in self.inputs]) + except AttributeError: + ins='' + try: + outs=','.join([n.name for n in self.outputs]) + except AttributeError: + outs='' + return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela) + s=str(self) + if not s: + return None + total=master.total + n=len(str(total)) + fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n) + kw=self.keyword() + if kw: + kw+=' ' + return fs%(cur(),total,kw,col1,s,col2) + def attr(self,att,default=None): + ret=getattr(self,att,self) + if ret is self:return getattr(self.__class__,att,default) + return ret + def hash_constraints(self): + cls=self.__class__ + tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode) + h=hash(tup) + return h + def format_error(self): + msg=getattr(self,'last_cmd','') + name=getattr(self.generator,'name','') + if getattr(self,"err_msg",None): + return self.err_msg + elif not self.hasrun: + return'task in %r was not executed for some reason: %r'%(name,self) + elif self.hasrun==CRASHED: + try: + return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg) + except AttributeError: + return' -> task in %r failed: %r\n%r'%(name,self,msg) + elif self.hasrun==MISSING: + return' -> missing files in %r: %r\n%r'%(name,self,msg) + else: + return'invalid status for task in %r: %r'%(name,self.hasrun) + def colon(self,var1,var2): + tmp=self.env[var1] + if not tmp: + return[] + if isinstance(var2,str): + it=self.env[var2] + else: + it=var2 + if isinstance(tmp,str): + return[tmp%x for x in it] + else: + lst=[] + for y in it: + lst.extend(tmp) + lst.append(y) + return lst +class Task(TaskBase): + vars=[] + shell=False + def __init__(self,*k,**kw): + TaskBase.__init__(self,*k,**kw) + self.env=kw['env'] + self.inputs=[] + self.outputs=[] + self.dep_nodes=[] 
+ self.run_after=set([]) + def __str__(self): + name=self.__class__.__name__ + if self.outputs: + if(name.endswith('lib')or name.endswith('program'))or not self.inputs: + node=self.outputs[0] + return node.path_from(node.ctx.launch_node()) + if not(self.inputs or self.outputs): + return self.__class__.__name__ + if len(self.inputs)==1: + node=self.inputs[0] + return node.path_from(node.ctx.launch_node()) + src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs]) + tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs]) + if self.outputs:sep=' -> ' + else:sep='' + return'%s: %s%s%s'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) + def keyword(self): + name=self.__class__.__name__ + if name.endswith('lib')or name.endswith('program'): + return'Linking' + if len(self.inputs)==1 and len(self.outputs)==1: + return'Compiling' + if not self.inputs: + if self.outputs: + return'Creating' + else: + return'Running' + return'Processing' + def __repr__(self): + try: + ins=",".join([x.name for x in self.inputs]) + outs=",".join([x.name for x in self.outputs]) + except AttributeError: + ins=",".join([str(x)for x in self.inputs]) + outs=",".join([str(x)for x in self.outputs]) + return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + up=m.update + up(self.__class__.__name__) + for x in self.inputs+self.outputs: + up(x.abspath()) + self.uid_=m.digest() + return self.uid_ + def set_inputs(self,inp): + if isinstance(inp,list):self.inputs+=inp + else:self.inputs.append(inp) + def set_outputs(self,out): + if isinstance(out,list):self.outputs+=out + else:self.outputs.append(out) + def set_run_after(self,task): + assert isinstance(task,TaskBase) + self.run_after.add(task) + def signature(self): + try:return self.cache_sig + except AttributeError:pass + self.m=Utils.md5() + self.m.update(self.hcode) + 
self.sig_explicit_deps() + self.sig_vars() + if self.scan: + try: + self.sig_implicit_deps() + except Errors.TaskRescan: + return self.signature() + ret=self.cache_sig=self.m.digest() + return ret + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return ASK_LATER + bld=self.generator.bld + try: + new_sig=self.signature() + except Errors.TaskNotReady: + return ASK_LATER + key=self.uid() + try: + prev_sig=bld.task_sigs[key] + except KeyError: + Logs.debug("task: task %r must run as it was never run before or the task code changed"%self) + return RUN_ME + for node in self.outputs: + try: + if node.sig!=new_sig: + return RUN_ME + except AttributeError: + Logs.debug("task: task %r must run as the output nodes do not exist"%self) + return RUN_ME + if new_sig!=prev_sig: + return RUN_ME + return SKIP_ME + def post_run(self): + bld=self.generator.bld + sig=self.signature() + for node in self.outputs: + try: + os.stat(node.abspath()) + except OSError: + self.hasrun=MISSING + self.err_msg='-> missing file: %r'%node.abspath() + raise Errors.WafError(self.err_msg) + node.sig=node.cache_sig=sig + bld.task_sigs[self.uid()]=self.cache_sig + def sig_explicit_deps(self): + bld=self.generator.bld + upd=self.m.update + for x in self.inputs+self.dep_nodes: + try: + upd(x.get_bld_sig()) + except(AttributeError,TypeError): + raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self)) + if bld.deps_man: + additional_deps=bld.deps_man + for x in self.inputs+self.outputs: + try: + d=additional_deps[id(x)] + except KeyError: + continue + for v in d: + if isinstance(v,bld.root.__class__): + try: + v=v.get_bld_sig() + except AttributeError: + raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self)) + elif hasattr(v,'__call__'): + v=v() + upd(v) + return self.m.digest() + def sig_vars(self): + bld=self.generator.bld + env=self.env + upd=self.m.update + act_sig=bld.hash_env_vars(env,self.__class__.vars) + upd(act_sig) + 
dep_vars=getattr(self,'dep_vars',None) + if dep_vars: + upd(bld.hash_env_vars(env,dep_vars)) + return self.m.digest() + scan=None + def sig_implicit_deps(self): + bld=self.generator.bld + key=self.uid() + prev=bld.task_sigs.get((key,'imp'),[]) + if prev: + try: + if prev==self.compute_sig_implicit_deps(): + return prev + except Errors.TaskNotReady: + raise + except EnvironmentError: + for x in bld.node_deps.get(self.uid(),[]): + if not x.is_bld(): + try: + os.stat(x.abspath()) + except OSError: + try: + del x.parent.children[x.name] + except KeyError: + pass + del bld.task_sigs[(key,'imp')] + raise Errors.TaskRescan('rescan') + (nodes,names)=self.scan() + if Logs.verbose: + Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) + bld.node_deps[key]=nodes + bld.raw_deps[key]=names + self.are_implicit_nodes_ready() + try: + bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps() + except Exception: + if Logs.verbose: + for k in bld.node_deps.get(self.uid(),[]): + try: + k.get_bld_sig() + except Exception: + Logs.warn('Missing signature for node %r (may cause rebuilds)'%k) + else: + return sig + def compute_sig_implicit_deps(self): + upd=self.m.update + bld=self.generator.bld + self.are_implicit_nodes_ready() + for k in bld.node_deps.get(self.uid(),[]): + upd(k.get_bld_sig()) + return self.m.digest() + def are_implicit_nodes_ready(self): + bld=self.generator.bld + try: + cache=bld.dct_implicit_nodes + except AttributeError: + bld.dct_implicit_nodes=cache={} + try: + dct=cache[bld.cur] + except KeyError: + dct=cache[bld.cur]={} + for tsk in bld.cur_tasks: + for x in tsk.outputs: + dct[x]=tsk + modified=False + for x in bld.node_deps.get(self.uid(),[]): + if x in dct: + self.run_after.add(dct[x]) + modified=True + if modified: + for tsk in self.run_after: + if not tsk.hasrun: + raise Errors.TaskNotReady('not ready') +if sys.hexversion>0x3000000: + def uid(self): + try: + return self.uid_ + except AttributeError: + m=Utils.md5() + 
up=m.update + up(self.__class__.__name__.encode('iso8859-1','xmlcharrefreplace')) + for x in self.inputs+self.outputs: + up(x.abspath().encode('iso8859-1','xmlcharrefreplace')) + self.uid_=m.digest() + return self.uid_ + uid.__doc__=Task.uid.__doc__ + Task.uid=uid +def is_before(t1,t2): + to_list=Utils.to_list + for k in to_list(t2.ext_in): + if k in to_list(t1.ext_out): + return 1 + if t1.__class__.__name__ in to_list(t2.after): + return 1 + if t2.__class__.__name__ in to_list(t1.before): + return 1 + return 0 +def set_file_constraints(tasks): + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for x in tasks: + for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]): + ins[id(a)].add(x) + for a in getattr(x,'outputs',[]): + outs[id(a)].add(x) + links=set(ins.keys()).intersection(outs.keys()) + for k in links: + for a in ins[k]: + a.run_after.update(outs[k]) +def set_precedence_constraints(tasks): + cstr_groups=Utils.defaultdict(list) + for x in tasks: + h=x.hash_constraints() + cstr_groups[h].append(x) + keys=list(cstr_groups.keys()) + maxi=len(keys) + for i in range(maxi): + t1=cstr_groups[keys[i]][0] + for j in range(i+1,maxi): + t2=cstr_groups[keys[j]][0] + if is_before(t1,t2): + a=i + b=j + elif is_before(t2,t1): + a=j + b=i + else: + continue + aval=set(cstr_groups[keys[a]]) + for x in cstr_groups[keys[b]]: + x.run_after.update(aval) +def funex(c): + dc={} + exec(c,dc) + return dc['f'] +re_novar=re.compile(r"^(SRC|TGT)\W+.*?$") +reg_act=re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P\w+)(?P.*?)\})",re.M) +def compile_fun_shell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\\\' + elif g('subst'):extr.append((g('var'),g('code')));return"%s" + return None + line=reg_act.sub(repl,line)or line + parm=[] + dvars=[] + app=parm.append + for(var,meth)in extr: + if var=='SRC': + if meth:app('tsk.inputs%s'%meth) + else:app('" ".join([a.path_from(cwdx) for a in tsk.inputs])') + elif var=='TGT': + if 
meth:app('tsk.outputs%s'%meth) + else:app('" ".join([a.path_from(cwdx) for a in tsk.outputs])') + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(cwdx) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m='[tsk.inputs%s]'%m[3:] + elif re_novar.match(m): + m='[tsk.outputs%s]'%m[3:] + elif m[:3]not in('tsk','gen','bld'): + dvars.extend([var,meth[1:]]) + m='%r'%m + app('" ".join(tsk.colon(%r, %s))'%(var,m)) + else: + app('%s%s'%(var,meth)) + else: + if not var in dvars:dvars.append(var) + app("p('%s')"%var) + if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) + else:parm='' + c=COMPILE_TEMPLATE_SHELL%(line,parm) + Logs.debug('action: %s'%c.strip().splitlines()) + return(funex(c),dvars) +def compile_fun_noshell(line): + extr=[] + def repl(match): + g=match.group + if g('dollar'):return"$" + elif g('backslash'):return'\\' + elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" + return None + line2=reg_act.sub(repl,line) + params=line2.split('<<|@|>>') + assert(extr) + buf=[] + dvars=[] + app=buf.append + for x in range(len(extr)): + params[x]=params[x].strip() + if params[x]: + app("lst.extend(%r)"%params[x].split()) + (var,meth)=extr[x] + if var=='SRC': + if meth:app('lst.append(tsk.inputs%s)'%meth) + else:app("lst.extend([a.path_from(cwdx) for a in tsk.inputs])") + elif var=='TGT': + if meth:app('lst.append(tsk.outputs%s)'%meth) + else:app("lst.extend([a.path_from(cwdx) for a in tsk.outputs])") + elif meth: + if meth.startswith(':'): + m=meth[1:] + if m=='SRC': + m='[a.path_from(cwdx) for a in tsk.inputs]' + elif m=='TGT': + m='[a.path_from(cwdx) for a in tsk.outputs]' + elif re_novar.match(m): + m='[tsk.inputs%s]'%m[3:] + elif re_novar.match(m): + m='[tsk.outputs%s]'%m[3:] + elif m[:3]not in('tsk','gen','bld'): + dvars.extend([var,m]) + m='%r'%m + app('lst.extend(tsk.colon(%r, %s))'%(var,m)) + else: + app('lst.extend(gen.to_list(%s%s))'%(var,meth)) + else: + 
app('lst.extend(to_list(env[%r]))'%var) + if not var in dvars:dvars.append(var) + if extr: + if params[-1]: + app("lst.extend(%r)"%params[-1].split()) + fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) + Logs.debug('action: %s'%fun.strip().splitlines()) + return(funex(fun),dvars) +def compile_fun(line,shell=False): + if isinstance(line,str): + if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: + shell=True + else: + dvars_lst=[] + funs_lst=[] + for x in line: + if isinstance(x,str): + fun,dvars=compile_fun(x,shell) + dvars_lst+=dvars + funs_lst.append(fun) + else: + funs_lst.append(x) + def composed_fun(task): + for x in funs_lst: + ret=x(task) + if ret: + return ret + return None + return composed_fun,dvars + if shell: + return compile_fun_shell(line) + else: + return compile_fun_noshell(line) +def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): + params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,} + if isinstance(func,str)or isinstance(func,tuple): + params['run_str']=func + else: + params['run']=func + cls=type(Task)(name,(Task,),params) + global classes + classes[name]=cls + return cls +def always_run(cls): + old=cls.runnable_status + def always(self): + ret=old(self) + if ret==SKIP_ME: + ret=RUN_ME + return ret + cls.runnable_status=always + return cls +def update_outputs(cls): + old_post_run=cls.post_run + def post_run(self): + old_post_run(self) + for node in self.outputs: + node.sig=node.cache_sig=Utils.h_file(node.abspath()) + self.generator.bld.task_sigs[node.abspath()]=self.uid() + cls.post_run=post_run + old_runnable_status=cls.runnable_status + def runnable_status(self): + status=old_runnable_status(self) + if status!=RUN_ME: + return status + try: + bld=self.generator.bld + prev_sig=bld.task_sigs[self.uid()] + if 
prev_sig==self.signature(): + for x in self.outputs: + if not x.is_child_of(bld.bldnode): + x.sig=Utils.h_file(x.abspath()) + if not x.sig or bld.task_sigs[x.abspath()]!=self.uid(): + return RUN_ME + return SKIP_ME + except OSError: + pass + except IOError: + pass + except KeyError: + pass + except IndexError: + pass + except AttributeError: + pass + return RUN_ME + cls.runnable_status=runnable_status + return cls diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py new file mode 100644 index 00000000..d9f770ce --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py @@ -0,0 +1,433 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import copy,re,os +from waflib import Task,Utils,Logs,Errors,ConfigSet,Node +feats=Utils.defaultdict(set) +HEADER_EXTS=['.h','.hpp','.hxx','.hh'] +class task_gen(object): + mappings=Utils.ordered_iter_dict() + prec=Utils.defaultdict(list) + def __init__(self,*k,**kw): + self.source='' + self.target='' + self.meths=[] + self.prec=Utils.defaultdict(list) + self.mappings={} + self.features=[] + self.tasks=[] + if not'bld'in kw: + self.env=ConfigSet.ConfigSet() + self.idx=0 + self.path=None + else: + self.bld=kw['bld'] + self.env=self.bld.env.derive() + self.path=self.bld.path + try: + self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1 + except AttributeError: + self.bld.idx={} + self.idx=self.bld.idx[id(self.path)]=1 + for key,val in kw.items(): + setattr(self,key,val) + def __str__(self): + return""%(self.name,self.path.abspath()) + def __repr__(self): + lst=[] + for x in self.__dict__.keys(): + if x not in('env','bld','compiled_tasks','tasks'): + lst.append("%s=%s"%(x,repr(getattr(self,x)))) + return"bld(%s) in %s"%(", ".join(lst),self.path.abspath()) + def get_name(self): + try: + return self._name + except AttributeError: + 
if isinstance(self.target,list): + lst=[str(x)for x in self.target] + name=self._name=','.join(lst) + else: + name=self._name=str(self.target) + return name + def set_name(self,name): + self._name=name + name=property(get_name,set_name) + def to_list(self,val): + if isinstance(val,str):return val.split() + else:return val + def post(self): + if getattr(self,'posted',None): + return False + self.posted=True + keys=set(self.meths) + self.features=Utils.to_list(self.features) + for x in self.features+['*']: + st=feats[x] + if not st: + if not x in Task.classes: + Logs.warn('feature %r does not exist - bind at least one method to it'%x) + keys.update(list(st)) + prec={} + prec_tbl=self.prec or task_gen.prec + for x in prec_tbl: + if x in keys: + prec[x]=prec_tbl[x] + tmp=[] + for a in keys: + for x in prec.values(): + if a in x:break + else: + tmp.append(a) + tmp.sort() + out=[] + while tmp: + e=tmp.pop() + if e in keys:out.append(e) + try: + nlst=prec[e] + except KeyError: + pass + else: + del prec[e] + for x in nlst: + for y in prec: + if x in prec[y]: + break + else: + tmp.append(x) + if prec: + raise Errors.WafError('Cycle detected in the method execution %r'%prec) + out.reverse() + self.meths=out + Logs.debug('task_gen: posting %s %d'%(self,id(self))) + for x in out: + try: + v=getattr(self,x) + except AttributeError: + raise Errors.WafError('%r is not a valid task generator method'%x) + Logs.debug('task_gen: -> %s (%d)'%(x,id(self))) + v() + Logs.debug('task_gen: posted %s'%self.name) + return True + def get_hook(self,node): + name=node.name + if self.mappings: + for k in self.mappings: + if name.endswith(k): + return self.mappings[k] + for k in task_gen.mappings: + if name.endswith(k): + return task_gen.mappings[k] + raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)"%(node,task_gen.mappings.keys())) + def create_task(self,name,src=None,tgt=None,**kw): + task=Task.classes[name](env=self.env.derive(),generator=self) + if 
src: + task.set_inputs(src) + if tgt: + task.set_outputs(tgt) + task.__dict__.update(kw) + self.tasks.append(task) + return task + def clone(self,env): + newobj=self.bld() + for x in self.__dict__: + if x in('env','bld'): + continue + elif x in('path','features'): + setattr(newobj,x,getattr(self,x)) + else: + setattr(newobj,x,copy.copy(getattr(self,x))) + newobj.posted=False + if isinstance(env,str): + newobj.env=self.bld.all_envs[env].derive() + else: + newobj.env=env.derive() + return newobj +def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): + ext_in=Utils.to_list(ext_in) + ext_out=Utils.to_list(ext_out) + if not name: + name=rule + cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) + def x_file(self,node): + ext=decider and decider(self,node)or cls.ext_out + if ext_in: + _ext_in=ext_in[0] + tsk=self.create_task(name,node) + cnt=0 + keys=set(self.mappings.keys())|set(self.__class__.mappings.keys()) + for x in ext: + k=node.change_ext(x,ext_in=_ext_in) + tsk.outputs.append(k) + if reentrant!=None: + if cnt=','exact-version':'==','max-version':'<=',} +SNIP_FUNCTION=''' +int main(int argc, char **argv) { + void (*p)(); + (void)argc; (void)argv; + p=(void(*)())(%s); + return !p; +} +''' +SNIP_TYPE=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + if ((%(type_name)s *) 0) return 0; + if (sizeof (%(type_name)s)) return 0; + return 1; +} +''' +SNIP_EMPTY_PROGRAM=''' +int main(int argc, char **argv) { + (void)argc; (void)argv; + return 0; +} +''' +SNIP_FIELD=''' +int main(int argc, char **argv) { + char *off; + (void)argc; (void)argv; + off = (char*) &((%(type_name)s*)0)->%(field_name)s; + return (size_t) off < sizeof(%(type_name)s); +} +''' 
+MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} +MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',} +@conf +def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None): + assert(isinstance(line,str)) + env=env or self.env + if posix is None: + posix=True + if'\\'in line: + posix=('\\ 'in line)or('\\\\'in line) + lex=shlex.shlex(line,posix=posix) + lex.whitespace_split=True + lex.commenters='' + lst=list(lex) + app=env.append_value + appu=env.append_unique + uselib=uselib_store + static=False + while lst: + x=lst.pop(0) + st=x[:2] + ot=x[2:] + if st=='-I'or st=='/I': + if not ot:ot=lst.pop(0) + appu('INCLUDES_'+uselib,[ot]) + elif st=='-i': + tmp=[x,lst.pop(0)] + app('CFLAGS',tmp) + app('CXXFLAGS',tmp) + elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): + if not ot:ot=lst.pop(0) + app('DEFINES_'+uselib,[ot]) + elif st=='-l': + if not ot:ot=lst.pop(0) + prefix=(force_static or static)and'STLIB_'or'LIB_' + appu(prefix+uselib,[ot]) + elif st=='-L': + if not ot:ot=lst.pop(0) + prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' + appu(prefix+uselib,[ot]) + elif x.startswith('/LIBPATH:'): + prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' + appu(prefix+uselib,[x.replace('/LIBPATH:','')]) + elif x=='-pthread'or x.startswith('+')or 
x.startswith('-std'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + app('LINKFLAGS_'+uselib,[x]) + elif x=='-framework': + appu('FRAMEWORK_'+uselib,[lst.pop(0)]) + elif x.startswith('-F'): + appu('FRAMEWORKPATH_'+uselib,[x[2:]]) + elif x=='-Wl,-rpath'or x=='-Wl,-R': + app('RPATH_'+uselib,lst.pop(0).lstrip('-Wl,')) + elif x.startswith('-Wl,-R,'): + app('RPATH_'+uselib,x[7:]) + elif x.startswith('-Wl,-R'): + app('RPATH_'+uselib,x[6:]) + elif x.startswith('-Wl,-rpath,'): + app('RPATH_'+uselib,x[11:]) + elif x=='-Wl,-Bstatic'or x=='-Bstatic': + static=True + elif x=='-Wl,-Bdynamic'or x=='-Bdynamic': + static=False + elif x.startswith('-Wl'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'): + app('CFLAGS_'+uselib,[x]) + app('CXXFLAGS_'+uselib,[x]) + elif x.startswith('-bundle'): + app('LINKFLAGS_'+uselib,[x]) + elif x.startswith('-undefined')or x.startswith('-Xlinker'): + arg=lst.pop(0) + app('LINKFLAGS_'+uselib,[x,arg]) + elif x.startswith('-arch')or x.startswith('-isysroot'): + tmp=[x,lst.pop(0)] + app('CFLAGS_'+uselib,tmp) + app('CXXFLAGS_'+uselib,tmp) + app('LINKFLAGS_'+uselib,tmp) + elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'): + appu('LINKFLAGS_'+uselib,[x]) +@conf +def validate_cfg(self,kw): + if not'path'in kw: + if not self.env.PKGCONFIG: + self.find_program('pkg-config',var='PKGCONFIG') + kw['path']=self.env.PKGCONFIG + if'atleast_pkgconfig_version'in kw: + if not'msg'in kw: + kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] + return + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'errmsg'in kw: + kw['errmsg']='not found' + if'modversion'in kw: + if not'msg'in kw: + kw['msg']='Checking for %r version'%kw['modversion'] + return + for x in cfg_ver.keys(): + y=x.replace('-','_') + if y in kw: + if not'package'in kw: + raise ValueError('%s requires a package'%x) + if not'msg'in kw: + kw['msg']='Checking for %r %s 
%s'%(kw['package'],cfg_ver[x],kw[y]) + return + if not'define_name'in kw: + pkgname=kw.get('uselib_store',kw['package'].upper()) + kw['define_name']=self.have_define(pkgname) + if not'uselib_store'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + kw['msg']='Checking for %r'%(kw['package']or kw['path']) +@conf +def exec_cfg(self,kw): + path=Utils.to_list(kw['path']) + env=self.env.env or None + def define_it(): + pkgname=kw.get('uselib_store',kw['package'].upper()) + if kw.get('global_define'): + self.define(self.have_define(kw['package']),1,False) + else: + self.env.append_unique('DEFINES_%s'%pkgname,"%s=1"%self.have_define(pkgname)) + self.env[self.have_define(pkgname)]=1 + if'atleast_pkgconfig_version'in kw: + cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] + self.cmd_and_log(cmd,env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + return + for x in cfg_ver: + y=x.replace('-','_') + if y in kw: + self.cmd_and_log(path+['--%s=%s'%(x,kw[y]),kw['package']],env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + break + if'modversion'in kw: + version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip() + self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) + return version + lst=[]+path + defi=kw.get('define_variable',None) + if not defi: + defi=self.env.PKG_CONFIG_DEFINES or{} + for key,val in defi.items(): + lst.append('--define-variable=%s=%s'%(key,val)) + static=kw.get('force_static',False) + if'args'in kw: + args=Utils.to_list(kw['args']) + if'--static'in args or'--static-libs'in args: + static=True + lst+=args + lst.extend(Utils.to_list(kw['package'])) + if'variables'in kw: + v_env=kw.get('env',self.env) + uselib=kw.get('uselib_store',kw['package'].upper()) + vars=Utils.to_list(kw['variables']) + for v in vars: + val=self.cmd_and_log(lst+['--variable='+v],env=env).strip() + var='%s_%s'%(uselib,v) + v_env[var]=val + if not'okmsg'in kw: + 
kw['okmsg']='yes' + return + ret=self.cmd_and_log(lst,env=env) + if not'okmsg'in kw: + kw['okmsg']='yes' + define_it() + self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static,posix=kw.get('posix',None)) + return ret +@conf +def check_cfg(self,*k,**kw): + if k: + lst=k[0].split() + kw['package']=lst[0] + kw['args']=' '.join(lst[1:]) + self.validate_cfg(kw) + if'msg'in kw: + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.exec_cfg(kw) + except self.errors.WafError: + if'errmsg'in kw: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + if not ret: + ret=True + kw['success']=ret + if'okmsg'in kw: + self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret +def build_fun(bld): + if bld.kw['compile_filename']: + node=bld.srcnode.make_node(bld.kw['compile_filename']) + node.write(bld.kw['code']) + o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog') + for k,v in bld.kw.items(): + setattr(o,k,v) + if not bld.kw.get('quiet',None): + bld.conf.to_log("==>\n%s\n<=="%bld.kw['code']) +@conf +def validate_c(self,kw): + if not'build_fun'in kw: + kw['build_fun']=build_fun + if not'env'in kw: + kw['env']=self.env.derive() + env=kw['env'] + if not'compiler'in kw and not'features'in kw: + kw['compiler']='c' + if env['CXX_NAME']and Task.classes.get('cxx',None): + kw['compiler']='cxx' + if not self.env['CXX']: + self.fatal('a c++ compiler is required') + else: + if not self.env['CC']: + self.fatal('a c compiler is required') + if not'compile_mode'in kw: + kw['compile_mode']='c' + if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx': + kw['compile_mode']='cxx' + if not'type'in kw: + kw['type']='cprogram' + if not'features'in kw: + if not'header_name'in kw or kw.get('link_header_test',True): + kw['features']=[kw['compile_mode'],kw['type']] + else: + kw['features']=[kw['compile_mode']] 
+ else: + kw['features']=Utils.to_list(kw['features']) + if not'compile_filename'in kw: + kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') + def to_header(dct): + if'header_name'in dct: + dct=Utils.to_list(dct['header_name']) + return''.join(['#include <%s>\n'%x for x in dct]) + return'' + if'framework_name'in kw: + fwkname=kw['framework_name'] + if not'uselib_store'in kw: + kw['uselib_store']=fwkname.upper() + if not kw.get('no_header',False): + if not'header_name'in kw: + kw['header_name']=[] + fwk='%s/%s.h'%(fwkname,fwkname) + if kw.get('remove_dot_h',None): + fwk=fwk[:-2] + kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] + kw['msg']='Checking for framework %s'%fwkname + kw['framework']=fwkname + if'function_name'in kw: + fu=kw['function_name'] + if not'msg'in kw: + kw['msg']='Checking for function %s'%fu + kw['code']=to_header(kw)+SNIP_FUNCTION%fu + if not'uselib_store'in kw: + kw['uselib_store']=fu.upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(fu) + elif'type_name'in kw: + tu=kw['type_name'] + if not'header_name'in kw: + kw['header_name']='stdint.h' + if'field_name'in kw: + field=kw['field_name'] + kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field} + if not'msg'in kw: + kw['msg']='Checking for field %s in %s'%(field,tu) + if not'define_name'in kw: + kw['define_name']=self.have_define((tu+'_'+field).upper()) + else: + kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu} + if not'msg'in kw: + kw['msg']='Checking for type %s'%tu + if not'define_name'in kw: + kw['define_name']=self.have_define(tu.upper()) + elif'header_name'in kw: + if not'msg'in kw: + kw['msg']='Checking for header %s'%kw['header_name'] + l=Utils.to_list(kw['header_name']) + assert len(l)>0,'list of headers in header_name is empty' + kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM + if not'uselib_store'in kw: + kw['uselib_store']=l[0].upper() + if not'define_name'in kw: + kw['define_name']=self.have_define(l[0]) + 
if'lib'in kw: + if not'msg'in kw: + kw['msg']='Checking for library %s'%kw['lib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['lib'].upper() + if'stlib'in kw: + if not'msg'in kw: + kw['msg']='Checking for static library %s'%kw['stlib'] + if not'uselib_store'in kw: + kw['uselib_store']=kw['stlib'].upper() + if'fragment'in kw: + kw['code']=kw['fragment'] + if not'msg'in kw: + kw['msg']='Checking for code snippet' + if not'errmsg'in kw: + kw['errmsg']='no' + for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')): + if flagsname in kw: + if not'msg'in kw: + kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) + if not'errmsg'in kw: + kw['errmsg']='no' + if not'execute'in kw: + kw['execute']=False + if kw['execute']: + kw['features'].append('test_exec') + kw['chmod']=493 + if not'errmsg'in kw: + kw['errmsg']='not found' + if not'okmsg'in kw: + kw['okmsg']='yes' + if not'code'in kw: + kw['code']=SNIP_EMPTY_PROGRAM + if self.env[INCKEYS]: + kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] + if kw.get('merge_config_header',False)or env.merge_config_header: + kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code']) + env.DEFINES=[] + if not kw.get('success'):kw['success']=None + if'define_name'in kw: + self.undefine(kw['define_name']) + if not'msg'in kw: + self.fatal('missing "msg" in conf.check(...)') +@conf +def post_check(self,*k,**kw): + is_success=0 + if kw['execute']: + if kw['success']is not None: + if kw.get('define_ret',False): + is_success=kw['success'] + else: + is_success=(kw['success']==0) + else: + is_success=(kw['success']==0) + if'define_name'in kw: + if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: + if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): + self.define(kw['define_name'],is_success,quote=kw.get('quote',1)) + else: + self.define_cond(kw['define_name'],is_success) + else: + 
self.define_cond(kw['define_name'],is_success) + if kw.get('global_define',None): + self.env[kw['define_name']]=is_success + if'header_name'in kw: + if kw.get('auto_add_header_name',False): + self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) + if is_success and'uselib_store'in kw: + from waflib.Tools import ccroot + _vars=set([]) + for x in kw['features']: + if x in ccroot.USELIB_VARS: + _vars|=ccroot.USELIB_VARS[x] + for k in _vars: + lk=k.lower() + if lk in kw: + val=kw[lk] + if isinstance(val,str): + val=val.rstrip(os.path.sep) + self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val)) + return is_success +@conf +def check(self,*k,**kw): + self.validate_c(kw) + self.start_msg(kw['msg'],**kw) + ret=None + try: + ret=self.run_build(*k,**kw) + except self.errors.ConfigurationError: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + if Logs.verbose>1: + raise + else: + self.fatal('The configuration failed') + else: + kw['success']=ret + ret=self.post_check(*k,**kw) + if not ret: + self.end_msg(kw['errmsg'],'YELLOW',**kw) + self.fatal('The configuration failed %r'%ret) + else: + self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) + return ret +class test_exec(Task.Task): + color='PINK' + def run(self): + if getattr(self.generator,'rpath',None): + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()]) + else: + env=self.env.env or{} + env.update(dict(os.environ)) + for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'): + env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'') + if getattr(self.generator,'define_ret',False): + self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env) + else: + self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env) +@feature('test_exec') 
+@after_method('apply_link') +def test_exec_fun(self): + self.create_task('test_exec',self.link_task.outputs[0]) +@conf +def check_cxx(self,*k,**kw): + kw['compiler']='cxx' + return self.check(*k,**kw) +@conf +def check_cc(self,*k,**kw): + kw['compiler']='c' + return self.check(*k,**kw) +@conf +def set_define_comment(self,key,comment): + coms=self.env.DEFINE_COMMENTS + if not coms: + coms=self.env.DEFINE_COMMENTS={} + coms[key]=comment or'' +@conf +def get_define_comment(self,key): + coms=self.env.DEFINE_COMMENTS or{} + return coms.get(key,'') +@conf +def define(self,key,val,quote=True,comment=''): + assert key and isinstance(key,str) + if val is True: + val=1 + elif val in(False,None): + val=0 + if isinstance(val,int)or isinstance(val,float): + s='%s=%s' + else: + s=quote and'%s="%s"'or'%s=%s' + app=s%(key,str(val)) + ban=key+'=' + lst=self.env['DEFINES'] + for x in lst: + if x.startswith(ban): + lst[lst.index(x)]=app + break + else: + self.env.append_value('DEFINES',app) + self.env.append_unique(DEFKEYS,key) + self.set_define_comment(key,comment) +@conf +def undefine(self,key,comment=''): + assert key and isinstance(key,str) + ban=key+'=' + lst=[x for x in self.env['DEFINES']if not x.startswith(ban)] + self.env['DEFINES']=lst + self.env.append_unique(DEFKEYS,key) + self.set_define_comment(key,comment) +@conf +def define_cond(self,key,val): + assert key and isinstance(key,str) + if val: + self.define(key,1) + else: + self.undefine(key) +@conf +def is_defined(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return True + return False +@conf +def get_define(self,key): + assert key and isinstance(key,str) + ban=key+'=' + for x in self.env['DEFINES']: + if x.startswith(ban): + return x[len(ban):] + return None +@conf +def have_define(self,key): + return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) +@conf +def 
write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''): + if not configfile:configfile=WAF_CONFIG_H + waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) + node=top and self.bldnode or self.path.get_bld() + node=node.make_node(configfile) + node.parent.mkdir() + lst=['/* WARNING! All changes made to this file will be lost! */\n'] + lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard)) + lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix)) + lst.append('\n#endif /* %s */\n'%waf_guard) + node.write('\n'.join(lst)) + self.env.append_unique(Build.CFG_FILES,[node.abspath()]) + if remove: + for key in self.env[DEFKEYS]: + self.undefine(key) + self.env[DEFKEYS]=[] +@conf +def get_config_header(self,defines=True,headers=False,define_prefix=''): + lst=[] + if self.env.WAF_CONFIG_H_PRELUDE: + lst.append(self.env.WAF_CONFIG_H_PRELUDE) + if headers: + for x in self.env[INCKEYS]: + lst.append('#include <%s>'%x) + if defines: + tbl={} + for k in self.env['DEFINES']: + a,_,b=k.partition('=') + tbl[a]=b + for k in self.env[DEFKEYS]: + caption=self.get_define_comment(k) + if caption: + caption=' /* %s */'%caption + try: + txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption) + except KeyError: + txt='/* #undef %s%s */%s'%(define_prefix,k,caption) + lst.append(txt) + return"\n".join(lst) +@conf +def cc_add_flags(conf): + conf.add_os_flags('CPPFLAGS',dup=False) + conf.add_os_flags('CFLAGS',dup=False) +@conf +def cxx_add_flags(conf): + conf.add_os_flags('CPPFLAGS',dup=False) + conf.add_os_flags('CXXFLAGS',dup=False) +@conf +def link_add_flags(conf): + conf.add_os_flags('LINKFLAGS',dup=False) + conf.add_os_flags('LDFLAGS',dup=False) +@conf +def cc_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + conf.load('c') +@conf +def cxx_load_tools(conf): + if not conf.env.DEST_OS: + conf.env.DEST_OS=Utils.unversioned_sys_platform() + 
conf.load('cxx') +@conf +def get_cc_version(conf,cc,gcc=False,icc=False,clang=False): + cmd=cc+['-dM','-E','-'] + env=conf.env.env or None + try: + out,err=conf.cmd_and_log(cmd,output=0,input='\n'.encode(),env=env) + except Exception: + conf.fatal('Could not determine the compiler version %r'%cmd) + if gcc: + if out.find('__INTEL_COMPILER')>=0: + conf.fatal('The intel compiler pretends to be gcc') + if out.find('__GNUC__')<0 and out.find('__clang__')<0: + conf.fatal('Could not determine the compiler type') + if icc and out.find('__INTEL_COMPILER')<0: + conf.fatal('Not icc/icpc') + if clang and out.find('__clang__')<0: + conf.fatal('Not clang/clang++') + if not clang and out.find('__clang__')>=0: + conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') + k={} + if icc or gcc or clang: + out=out.splitlines() + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + if not conf.env.DEST_OS: + conf.env.DEST_OS='' + for i in MACRO_TO_DESTOS: + if isD(i): + conf.env.DEST_OS=MACRO_TO_DESTOS[i] + break + else: + if isD('__APPLE__')and isD('__MACH__'): + conf.env.DEST_OS='darwin' + elif isD('__unix__'): + conf.env.DEST_OS='generic' + if isD('__ELF__'): + conf.env.DEST_BINFMT='elf' + elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): + conf.env.DEST_BINFMT='pe' + conf.env.LIBDIR=conf.env.BINDIR + elif isD('__APPLE__'): + conf.env.DEST_BINFMT='mac-o' + if not conf.env.DEST_BINFMT: + conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) + for i in MACRO_TO_DEST_CPU: + if isD(i): + conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i] + break + Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')])) + if icc: + ver=k['__INTEL_COMPILER'] + conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1]) + else: + if isD('__clang__')and isD('__clang_major__'): + 
conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__']) + else: + conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0')) + return k +@conf +def get_xlc_version(conf,cc): + cmd=cc+['-qversion'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError: + conf.fatal('Could not find xlc %r'%cmd) + for v in(r"IBM XL C/C\+\+.* V(?P\d*)\.(?P\d*)",): + version_re=re.compile(v,re.I).search + match=version_re(out or err) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + break + else: + conf.fatal('Could not determine the XLC version.') +@conf +def get_suncc_version(conf,cc): + cmd=cc+['-V'] + try: + out,err=conf.cmd_and_log(cmd,output=0) + except Errors.WafError as e: + if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')): + conf.fatal('Could not find suncc %r'%cmd) + out=e.stdout + err=e.stderr + version=(out or err) + version=version.splitlines()[0] + version_re=re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P\d*)\.(?P\d*)',re.I).search + match=version_re(version) + if match: + k=match.groupdict() + conf.env['CC_VERSION']=(k['major'],k['minor']) + else: + conf.fatal('Could not determine the suncc version.') +@conf +def add_as_needed(self): + if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME): + self.env.append_unique('LINKFLAGS','-Wl,--as-needed') +class cfgtask(Task.TaskBase): + def display(self): + return'' + def runnable_status(self): + return Task.RUN_ME + def uid(self): + return Utils.SIG_NIL + def run(self): + conf=self.conf + bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath()) + bld.env=conf.env + bld.init_dirs() + bld.in_msg=1 + bld.logger=self.logger + try: + bld.check(**self.args) + except Exception: + return 1 +@conf +def multicheck(self,*k,**kw): + self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw) + class par(object): + def 
__init__(self): + self.keep=False + self.returned_tasks=[] + self.task_sigs={} + self.progress_bar=0 + def total(self): + return len(tasks) + def to_log(self,*k,**kw): + return + bld=par() + tasks=[] + for dct in k: + x=cfgtask(bld=bld) + tasks.append(x) + x.args=dct + x.bld=bld + x.conf=self + x.args=dct + x.logger=Logs.make_mem_logger(str(id(x)),self.logger) + def it(): + yield tasks + while 1: + yield[] + p=Runner.Parallel(bld,Options.options.jobs) + p.biter=it() + p.start() + for x in tasks: + x.logger.memhandler.flush() + if p.error: + for x in p.error: + if getattr(x,'err_msg',None): + self.to_log(x.err_msg) + self.end_msg('fail',color='RED') + raise Errors.WafError('There is an error in the library, read config.log for more information') + for x in tasks: + if x.hasrun!=Task.SUCCESS: + self.end_msg(kw.get('errmsg','no'),color='YELLOW',**kw) + self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, read config.log for more information') + self.end_msg('ok',**kw) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py new file mode 100644 index 00000000..8cb4bce1 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py @@ -0,0 +1,137 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,shutil,platform +from waflib import Task,Utils,Errors +from waflib.TaskGen import taskgen_method,feature,after_method,before_method +app_info=''' + + + + + CFBundlePackageType + APPL + CFBundleGetInfoString + Created by Waf + CFBundleSignature + ???? 
+ NOTE + THIS IS A GENERATED FILE, DO NOT MODIFY + CFBundleExecutable + {app_name} + + +''' +@feature('c','cxx') +def set_macosx_deployment_target(self): + if self.env['MACOSX_DEPLOYMENT_TARGET']: + os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] + elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: + if Utils.unversioned_sys_platform()=='darwin': + os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) +@taskgen_method +def create_bundle_dirs(self,name,out): + dir=out.parent.find_or_declare(name) + dir.mkdir() + macos=dir.find_or_declare(['Contents','MacOS']) + macos.mkdir() + return dir +def bundle_name_for_output(out): + name=out.name + k=name.rfind('.') + if k>=0: + name=name[:k]+'.app' + else: + name=name+'.app' + return name +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macapp(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','MacOS',out.name]) + self.apptask=self.create_task('macapp',self.link_task.outputs,n1) + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name + self.bld.install_files(inst_to,n1,chmod=Utils.O755) + if getattr(self,'mac_files',None): + mac_files_root=getattr(self,'mac_files_root',None) + if isinstance(mac_files_root,str): + mac_files_root=self.path.find_node(mac_files_root) + if not mac_files_root: + self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root) + res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for node in self.to_nodes(self.mac_files): + relpath=node.path_from(mac_files_root or node.parent) + self.create_task('macapp',node,res_dir.make_node(relpath)) + self.bld.install_as(os.path.join(inst_to,relpath),node) + if getattr(self,'mac_resources',None): + 
res_dir=n1.parent.parent.make_node('Resources') + inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name + for x in self.to_list(self.mac_resources): + node=self.path.find_node(x) + if not node: + raise Errors.WafError('Missing mac_resource %r in %r'%(x,self)) + parent=node.parent + if os.path.isdir(node.abspath()): + nodes=node.ant_glob('**') + else: + nodes=[node] + for node in nodes: + rel=node.path_from(parent) + self.create_task('macapp',node,res_dir.make_node(rel)) + self.bld.install_as(inst_to+'/%s'%rel,node) + if getattr(self.bld,'is_install',None): + self.install_task.hasrun=Task.SKIP_ME +@feature('cprogram','cxxprogram') +@after_method('apply_link') +def create_task_macplist(self): + if self.env['MACAPP']or getattr(self,'mac_app',False): + out=self.link_task.outputs[0] + name=bundle_name_for_output(out) + dir=self.create_bundle_dirs(name,out) + n1=dir.find_or_declare(['Contents','Info.plist']) + self.plisttask=plisttask=self.create_task('macplist',[],n1) + plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env} + plist_ctx=getattr(self,'plist_context',None) + if(plist_ctx): + plisttask.context.update(plist_ctx) + if getattr(self,'mac_plist',False): + node=self.path.find_resource(self.mac_plist) + if node: + plisttask.inputs.append(node) + else: + plisttask.code=self.mac_plist + else: + plisttask.code=app_info + inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name + self.bld.install_files(inst_to,n1) +@feature('cshlib','cxxshlib') +@before_method('apply_link','propagate_uselib_vars') +def apply_bundle(self): + if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): + self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[] + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN'] + use=self.use=self.to_list(getattr(self,'use',[])) + if not'MACBUNDLE'in use: + use.append('MACBUNDLE') +app_dirs=['Contents','Contents/MacOS','Contents/Resources'] +class 
macapp(Task.Task): + color='PINK' + def run(self): + self.outputs[0].parent.mkdir() + shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) +class macplist(Task.Task): + color='PINK' + ext_in=['.bin'] + def run(self): + if getattr(self,'code',None): + txt=self.code + else: + txt=self.inputs[0].read() + context=getattr(self,'context',{}) + txt=txt.format(**context) + self.outputs[0].write(txt) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py new file mode 100644 index 00000000..4a2d12cd --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py @@ -0,0 +1,611 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,string,traceback +from waflib import Logs,Utils,Errors +from waflib.Logs import debug,error +class PreprocError(Errors.WafError): + pass +POPFILE='-' +recursion_limit=150 +go_absolute=False +standard_includes=['/usr/include'] +if Utils.is_win32: + standard_includes=[] +use_trigraphs=0 +strict_quotes=0 +g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',} +re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) +re_mac=re.compile("^[a-zA-Z_]\w*") +re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') +re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) +re_nl=re.compile('\\\\\r*\n',re.MULTILINE) +re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) +trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] +chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} +NUM='i' +OP='O' +IDENT='T' +STR='s' +CHAR='c' +tok_types=[NUM,STR,IDENT,OP] 
+exp_types=[r"""0[xX](?P[a-fA-F0-9]+)(?P[uUlL]*)|L*?'(?P(\\.|[^\\'])+)'|(?P\d+)[Ee](?P[+-]*?\d+)(?P[fFlL]*)|(?P\d*\.\d+)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P\d+\.\d*)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P0*)(?P\d+)(?P[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] +re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) +accepted='a' +ignored='i' +undefined='u' +skipped='s' +def repl(m): + s=m.group(0) + if s.startswith('/'): + return' ' + return s +def filter_comments(filename): + code=Utils.readf(filename) + if use_trigraphs: + for(a,b)in trig_def:code=code.split(a).join(b) + code=re_nl.sub('',code) + code=re_cpp.sub(repl,code) + return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] +prec={} +ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] +for x in range(len(ops)): + syms=ops[x] + for u in syms.split(): + prec[u]=x +def trimquotes(s): + if not s:return'' + s=s.rstrip() + if s[0]=="'"and s[-1]=="'":return s[1:-1] + return s +def reduce_nums(val_1,val_2,val_op): + try:a=0+val_1 + except TypeError:a=int(val_1) + try:b=0+val_2 + except TypeError:b=int(val_2) + d=val_op + if d=='%':c=a%b + elif d=='+':c=a+b + elif d=='-':c=a-b + elif d=='*':c=a*b + elif d=='/':c=a/b + elif d=='^':c=a^b + elif d=='==':c=int(a==b) + elif d=='|'or d=='bitor':c=a|b + elif d=='||'or d=='or':c=int(a or b) + elif d=='&'or d=='bitand':c=a&b + elif d=='&&'or d=='and':c=int(a and b) + elif d=='!='or d=='not_eq':c=int(a!=b) + elif d=='^'or d=='xor':c=int(a^b) + elif d=='<=':c=int(a<=b) + elif d=='<':c=int(a':c=int(a>b) + elif d=='>=':c=int(a>=b) + elif d=='<<':c=a<>':c=a>>b + else:c=0 + return c +def get_num(lst): + if not lst:raise PreprocError("empty list for get_num") + (p,v)=lst[0] + if p==OP: + if v=='(': + count_par=1 + i=1 + while i=prec[v]: + 
num2=reduce_nums(num,num2,v) + return get_term([(NUM,num2)]+lst) + else: + num3,lst=get_num(lst[1:]) + num3=reduce_nums(num2,num3,v2) + return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) + raise PreprocError("cannot reduce %r"%lst) +def reduce_eval(lst): + num,lst=get_term(lst) + return(NUM,num) +def stringize(lst): + lst=[str(v2)for(p2,v2)in lst] + return"".join(lst) +def paste_tokens(t1,t2): + p1=None + if t1[0]==OP and t2[0]==OP: + p1=OP + elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): + p1=IDENT + elif t1[0]==NUM and t2[0]==NUM: + p1=NUM + if not p1: + raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) + return(p1,t1[1]+t2[1]) +def reduce_tokens(lst,defs,ban=[]): + i=0 + while i=len(lst): + raise PreprocError("expected '(' after %r (got nothing)"%v) + (p2,v2)=lst[i] + if p2!=OP or v2!='(': + raise PreprocError("expected '(' after %r"%v) + del lst[i] + one_param=[] + count_paren=0 + while i1: + (p3,v3)=accu[-1] + (p4,v4)=accu[-2] + if v3=='##': + accu.pop() + if v4==','and pt1: + return(v,[[],t[1:]]) + else: + return(v,[[],[('T','')]]) +re_include=re.compile('^\s*(<(?P.*)>|"(?P.*)")') +def extract_include(txt,defs): + m=re_include.search(txt) + if m: + if m.group('a'):return'<',m.group('a') + if m.group('b'):return'"',m.group('b') + toks=tokenize(txt) + reduce_tokens(toks,defs,['waf_include']) + if not toks: + raise PreprocError("could not parse include %s"%txt) + if len(toks)==1: + if toks[0][0]==STR: + return'"',toks[0][1] + else: + if toks[0][1]=='<'and toks[-1][1]=='>': + ret='<',stringize(toks).lstrip('<').rstrip('>') + return ret + raise PreprocError("could not parse include %s."%txt) +def parse_char(txt): + if not txt:raise PreprocError("attempted to parse a null char") + if txt[0]!='\\': + return ord(txt) + c=txt[1] + if c=='x': + if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) + return int(txt[2:],16) + elif c.isdigit(): + if c=='0'and len(txt)==2:return 0 + for i in 3,2,1: + if len(txt)>i and 
txt[1:1+i].isdigit(): + return(1+i,int(txt[1:1+i],8)) + else: + try:return chr_esc[c] + except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) +def tokenize(s): + return tokenize_private(s)[:] +@Utils.run_once +def tokenize_private(s): + ret=[] + for match in re_clexer.finditer(s): + m=match.group + for name in tok_types: + v=m(name) + if v: + if name==IDENT: + try: + g_optrans[v]; + name=OP + except KeyError: + if v.lower()=="true": + v=1 + name=NUM + elif v.lower()=="false": + v=0 + name=NUM + elif name==NUM: + if m('oct'):v=int(v,8) + elif m('hex'):v=int(m('hex'),16) + elif m('n0'):v=m('n0') + else: + v=m('char') + if v:v=parse_char(v) + else:v=m('n2')or m('n4') + elif name==OP: + if v=='%:':v='#' + elif v=='%:%:':v='##' + elif name==STR: + v=v[1:-1] + ret.append((name,v)) + break + return ret +@Utils.run_once +def define_name(line): + return re_mac.match(line).group(0) +class c_parser(object): + def __init__(self,nodepaths=None,defines=None): + self.lines=[] + if defines is None: + self.defs={} + else: + self.defs=dict(defines) + self.state=[] + self.count_files=0 + self.currentnode_stack=[] + self.nodepaths=nodepaths or[] + self.nodes=[] + self.names=[] + self.curfile='' + self.ban_includes=set([]) + def cached_find_resource(self,node,filename): + try: + nd=node.ctx.cache_nd + except AttributeError: + nd=node.ctx.cache_nd={} + tup=(node,filename) + try: + return nd[tup] + except KeyError: + ret=node.find_resource(filename) + if ret: + if getattr(ret,'children',None): + ret=None + elif ret.is_child_of(node.ctx.bldnode): + tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) + if tmp and getattr(tmp,'children',None): + ret=None + nd[tup]=ret + return ret + def tryfind(self,filename): + if filename.endswith('.moc'): + self.names.append(filename) + return None + self.curfile=filename + found=self.cached_find_resource(self.currentnode_stack[-1],filename) + for n in self.nodepaths: + if found: + break + 
found=self.cached_find_resource(n,filename) + if found and not found in self.ban_includes: + self.nodes.append(found) + self.addlines(found) + else: + if not filename in self.names: + self.names.append(filename) + return found + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>recursion_limit: + raise PreprocError("recursion limit exceeded") + pc=self.parse_cache + debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=filter_comments(filepath) + lines.append((POPFILE,'')) + lines.reverse() + pc[filepath]=lines + self.lines.extend(lines) + except IOError: + raise PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + error("parsing %s failed"%filepath) + traceback.print_exc() + def start(self,node,env): + debug('preproc: scanning %s (in %s)',node.name,node.parent.name) + bld=node.ctx + try: + self.parse_cache=bld.parse_cache + except AttributeError: + self.parse_cache=bld.parse_cache={} + self.current_file=node + self.addlines(node) + if env['DEFINES']: + try: + lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]] + lst.reverse() + self.lines.extend([('define',x)for x in lst]) + except AttributeError: + pass + while self.lines: + (token,line)=self.lines.pop() + if token==POPFILE: + self.count_files-=1 + self.currentnode_stack.pop() + continue + try: + ve=Logs.verbose + if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state) + state=self.state + if token[:2]=='if': + state.append(undefined) + elif token=='endif': + state.pop() + if token[0]!='e': + if skipped in self.state or ignored in self.state: + continue + if token=='if': + ret=eval_macro(tokenize(line),self.defs) + if ret:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifdef': + m=re_mac.match(line) + if m and 
m.group(0)in self.defs:state[-1]=accepted + else:state[-1]=ignored + elif token=='ifndef': + m=re_mac.match(line) + if m and m.group(0)in self.defs:state[-1]=ignored + else:state[-1]=accepted + elif token=='include'or token=='import': + (kind,inc)=extract_include(line,self.defs) + if ve:debug('preproc: include found %s (%s) ',inc,kind) + if kind=='"'or not strict_quotes: + self.current_file=self.tryfind(inc) + if token=='import': + self.ban_includes.add(self.current_file) + elif token=='elif': + if state[-1]==accepted: + state[-1]=skipped + elif state[-1]==ignored: + if eval_macro(tokenize(line),self.defs): + state[-1]=accepted + elif token=='else': + if state[-1]==accepted:state[-1]=skipped + elif state[-1]==ignored:state[-1]=accepted + elif token=='define': + try: + self.defs[define_name(line)]=line + except Exception: + raise PreprocError("Invalid define line %s"%line) + elif token=='undef': + m=re_mac.match(line) + if m and m.group(0)in self.defs: + self.defs.__delitem__(m.group(0)) + elif token=='pragma': + if re_pragma_once.match(line.lower()): + self.ban_includes.add(self.current_file) + except Exception as e: + if Logs.verbose: + debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack()) +def scan(task): + global go_absolute + try: + incn=task.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) + if go_absolute: + nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] + else: + nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] + tmp=c_parser(nodepaths) + tmp.start(task.inputs[0],task.env) + if Logs.verbose: + debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py new file 
mode 100644 index 00000000..7791f238 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py @@ -0,0 +1,152 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Task +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +LIB_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllexport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void) { return 9; } +''' +MAIN_CODE=''' +#ifdef _MSC_VER +#define testEXPORT __declspec(dllimport) +#else +#define testEXPORT +#endif +testEXPORT int lib_func(void); +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(lib_func() == 9); +} +''' +@feature('link_lib_test') +@before_method('process_source') +def link_lib_test_fun(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + rpath=[] + if getattr(self,'add_rpath',False): + rpath=[self.bld.path.get_bld().abspath()] + mode=self.mode + m='%s %s'%(mode,mode) + ex=self.test_exec and'test_exec'or'' + bld=self.bld + bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) + bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) + bld(features='%sshlib'%m,source='test.'+mode,target='test') + bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) +@conf +def check_library(self,mode=None,test_exec=True): + if not mode: + mode='c' + if self.env.CXX: + mode='cxx' + self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,) +INLINE_CODE=''' +typedef int foo_t; +static %s foo_t static_foo () {return 0; } +%s foo_t foo () { + return 0; +} +''' +INLINE_VALUES=['inline','__inline__','__inline'] +@conf +def check_inline(self,**kw): + self.start_msg('Checking for inline') + if not'define_name'in kw: + kw['define_name']='INLINE_MACRO' + if not'features'in kw: + if 
self.env.CXX: + kw['features']=['cxx'] + else: + kw['features']=['c'] + for x in INLINE_VALUES: + kw['fragment']=INLINE_CODE%(x,x) + try: + self.check(**kw) + except self.errors.ConfigurationError: + continue + else: + self.end_msg(x) + if x!='inline': + self.define('inline',x,quote=False) + return x + self.fatal('could not use inline functions') +LARGE_FRAGMENT='''#include +int main(int argc, char **argv) { + (void)argc; (void)argv; + return !(sizeof(off_t) >= 8); +} +''' +@conf +def check_large_file(self,**kw): + if not'define_name'in kw: + kw['define_name']='HAVE_LARGEFILE' + if not'execute'in kw: + kw['execute']=True + if not'features'in kw: + if self.env.CXX: + kw['features']=['cxx','cxxprogram'] + else: + kw['features']=['c','cprogram'] + kw['fragment']=LARGE_FRAGMENT + kw['msg']='Checking for large file support' + ret=True + try: + if self.env.DEST_BINFMT!='pe': + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + if ret: + return True + kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' + kw['defines']=['_FILE_OFFSET_BITS=64'] + try: + ret=self.check(**kw) + except self.errors.ConfigurationError: + pass + else: + self.define('_FILE_OFFSET_BITS',64) + return ret + self.fatal('There is no support for large files') +ENDIAN_FRAGMENT=''' +short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; +short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; +int use_ascii (int i) { + return ascii_mm[i] + ascii_ii[i]; +} +short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; +short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; +int use_ebcdic (int i) { + return ebcdic_mm[i] + ebcdic_ii[i]; +} +extern int foo; +''' +class grep_for_endianness(Task.Task): + color='PINK' + def run(self): + txt=self.inputs[0].read(flags='rb').decode('iso8859-1') + if txt.find('LiTTleEnDian')>-1: + self.generator.tmp.append('little') + elif txt.find('BIGenDianSyS')>-1: + 
self.generator.tmp.append('big') + else: + return-1 +@feature('grep_for_endianness') +@after_method('process_source') +def grep_for_endianness_fun(self): + self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) +@conf +def check_endianness(self): + tmp=[] + def check_msg(self): + return tmp[0] + self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg) + return tmp[0] diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py new file mode 100644 index 00000000..498a0abe --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py @@ -0,0 +1,447 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils,Node,Errors +from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension +from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests +from waflib.Configure import conf +SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] +USELIB_VARS=Utils.defaultdict(set) +USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) +USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) +USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) +USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) +USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) +USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) +USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) 
+USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) +USELIB_VARS['asm']=set(['ASFLAGS']) +@taskgen_method +def create_compiled_task(self,name,node): + out='%s.%d.o'%(node.name,self.idx) + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task +@taskgen_method +def to_incnodes(self,inlst): + lst=[] + seen=set([]) + for x in self.to_list(inlst): + if x in seen or not x: + continue + seen.add(x) + if isinstance(x,Node.Node): + lst.append(x) + else: + if os.path.isabs(x): + lst.append(self.bld.root.make_node(x)or x) + else: + if x[0]=='#': + p=self.bld.bldnode.make_node(x[1:]) + v=self.bld.srcnode.make_node(x[1:]) + else: + p=self.path.get_bld().make_node(x) + v=self.path.make_node(x) + if p.is_child_of(self.bld.bldnode): + p.mkdir() + lst.append(p) + lst.append(v) + return lst +@feature('c','cxx','d','asm','fc','includes') +@after_method('propagate_uselib_vars','process_source') +def apply_incpaths(self): + lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) + self.includes_nodes=lst + self.env['INCPATHS']=[x.abspath()for x in lst] +class link_task(Task.Task): + color='YELLOW' + inst_to=None + chmod=Utils.O755 + def add_target(self,target): + if isinstance(target,str): + pattern=self.env[self.__class__.__name__+'_PATTERN'] + if not pattern: + pattern='%s' + folder,name=os.path.split(target) + if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): + nums=self.generator.vnum.split('.') + if self.env.DEST_BINFMT=='pe': + name=name+'-'+nums[0] + elif self.env.DEST_OS=='openbsd': + pattern='%s.%s'%(pattern,nums[0]) + if len(nums)>=2: + pattern+='.%s'%nums[1] + if folder: + 
tmp=folder+os.sep+pattern%name + else: + tmp=pattern%name + target=self.generator.path.find_or_declare(tmp) + self.set_outputs(target) +class stlink_task(link_task): + run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' + chmod=Utils.O644 +def rm_tgt(cls): + old=cls.run + def wrap(self): + try:os.remove(self.outputs[0].abspath()) + except OSError:pass + return old(self) + setattr(cls,'run',wrap) +rm_tgt(stlink_task) +@feature('c','cxx','d','fc','asm') +@after_method('process_source') +def apply_link(self): + for x in self.features: + if x=='cprogram'and'cxx'in self.features: + x='cxxprogram' + elif x=='cshlib'and'cxx'in self.features: + x='cxxshlib' + if x in Task.classes: + if issubclass(Task.classes[x],link_task): + link=x + break + else: + return + objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] + self.link_task=self.create_task(link,objs) + self.link_task.add_target(self.target) + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod,task=self.link_task) +@taskgen_method +def use_rec(self,name,**kw): + if name in self.tmp_use_not or name in self.tmp_use_seen: + return + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + self.uselib.append(name) + self.tmp_use_not.add(name) + return + self.tmp_use_seen.append(name) + y.post() + y.tmp_use_objects=objects=kw.get('objects',True) + y.tmp_use_stlib=stlib=kw.get('stlib',True) + try: + link_task=y.link_task + except AttributeError: + y.tmp_use_var='' + else: + objects=False + if not isinstance(link_task,stlink_task): + stlib=False + y.tmp_use_var='LIB' + else: + y.tmp_use_var='STLIB' + p=self.tmp_use_prec + for x in self.to_list(getattr(y,'use',[])): + if self.env["STLIB_"+x]: + continue + try: + p[x].append(name) + except KeyError: + p[x]=[name] + self.use_rec(x,objects=objects,stlib=stlib) 
+@feature('c','cxx','d','use','fc') +@before_method('apply_incpaths','propagate_uselib_vars') +@after_method('apply_link','process_source') +def process_use(self): + use_not=self.tmp_use_not=set([]) + self.tmp_use_seen=[] + use_prec=self.tmp_use_prec={} + self.uselib=self.to_list(getattr(self,'uselib',[])) + self.includes=self.to_list(getattr(self,'includes',[])) + names=self.to_list(getattr(self,'use',[])) + for x in names: + self.use_rec(x) + for x in use_not: + if x in use_prec: + del use_prec[x] + out=[] + tmp=[] + for x in self.tmp_use_seen: + for k in use_prec.values(): + if x in k: + break + else: + tmp.append(x) + while tmp: + e=tmp.pop() + out.append(e) + try: + nlst=use_prec[e] + except KeyError: + pass + else: + del use_prec[e] + for x in nlst: + for y in use_prec: + if x in use_prec[y]: + break + else: + tmp.append(x) + if use_prec: + raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) + out.reverse() + link_task=getattr(self,'link_task',None) + for x in out: + y=self.bld.get_tgen_by_name(x) + var=y.tmp_use_var + if var and link_task: + if var=='LIB'or y.tmp_use_stlib or x in names: + self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) + self.link_task.dep_nodes.extend(y.link_task.outputs) + tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) + self.env.append_unique(var+'PATH',[tmp_path]) + else: + if y.tmp_use_objects: + self.add_objects_from_tgen(y) + if getattr(y,'export_includes',None): + self.includes.extend(y.to_incnodes(y.export_includes)) + if getattr(y,'export_defines',None): + self.env.append_value('DEFINES',self.to_list(y.export_defines)) + for x in names: + try: + y=self.bld.get_tgen_by_name(x) + except Errors.WafError: + if not self.env['STLIB_'+x]and not x in self.uselib: + self.uselib.append(x) + else: + for k in self.to_list(getattr(y,'use',[])): + if not self.env['STLIB_'+k]and not k in self.uselib: + self.uselib.append(k) +@taskgen_method +def accept_node_to_link(self,node): + return 
not node.name.endswith('.pdb') +@taskgen_method +def add_objects_from_tgen(self,tg): + try: + link_task=self.link_task + except AttributeError: + pass + else: + for tsk in getattr(tg,'compiled_tasks',[]): + for x in tsk.outputs: + if self.accept_node_to_link(x): + link_task.inputs.append(x) +@taskgen_method +def get_uselib_vars(self): + _vars=set([]) + for x in self.features: + if x in USELIB_VARS: + _vars|=USELIB_VARS[x] + return _vars +@feature('c','cxx','d','fc','javac','cs','uselib','asm') +@after_method('process_use') +def propagate_uselib_vars(self): + _vars=self.get_uselib_vars() + env=self.env + app=env.append_value + feature_uselib=self.features+self.to_list(getattr(self,'uselib',[])) + for var in _vars: + y=var.lower() + val=getattr(self,y,[]) + if val: + app(var,self.to_list(val)) + for x in feature_uselib: + val=env['%s_%s'%(var,x)] + if val: + app(var,val) +@feature('cshlib','cxxshlib','fcshlib') +@after_method('apply_link') +def apply_implib(self): + if not self.env.DEST_BINFMT=='pe': + return + dll=self.link_task.outputs[0] + if isinstance(self.target,Node.Node): + name=self.target.name + else: + name=os.path.split(self.target)[1] + implib=self.env['implib_PATTERN']%name + implib=dll.parent.find_or_declare(implib) + self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) + self.link_task.outputs.append(implib) + if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': + node=self.path.find_resource(self.defs) + if not node: + raise Errors.WafError('invalid def file %r'%self.defs) + if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): + self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) + self.link_task.dep_nodes.append(node) + else: + self.link_task.inputs.append(node) + if getattr(self,'install_task',None): + try: + inst_to=self.install_path_implib + except AttributeError: + try: + inst_to=self.install_path + except AttributeError: + inst_to='${IMPLIBDIR}' + self.install_task.dest='${BINDIR}' + if not 
self.env.IMPLIBDIR: + self.env.IMPLIBDIR=self.env.LIBDIR + self.implib_install_task=self.bld.install_files(inst_to,implib,env=self.env,chmod=self.link_task.chmod,task=self.link_task) +re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') +@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') +@after_method('apply_link','propagate_uselib_vars') +def apply_vnum(self): + if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): + return + link=self.link_task + if not re_vnum.match(self.vnum): + raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self))) + nums=self.vnum.split('.') + node=link.outputs[0] + cnum=getattr(self,'cnum',str(nums[0])) + cnums=cnum.split('.') + if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums: + raise Errors.WafError('invalid compatibility version %s'%cnum) + libname=node.name + if libname.endswith('.dylib'): + name3=libname.replace('.dylib','.%s.dylib'%self.vnum) + name2=libname.replace('.dylib','.%s.dylib'%cnum) + else: + name3=libname+'.'+self.vnum + name2=libname+'.'+cnum + if self.env.SONAME_ST: + v=self.env.SONAME_ST%name2 + self.env.append_value('LINKFLAGS',v.split()) + if self.env.DEST_OS!='openbsd': + outs=[node.parent.find_or_declare(name3)] + if name2!=name3: + outs.append(node.parent.find_or_declare(name2)) + self.create_task('vnum',node,outs) + if getattr(self,'install_task',None): + self.install_task.hasrun=Task.SKIP_ME + bld=self.bld + path=self.install_task.dest + if self.env.DEST_OS=='openbsd': + libname=self.link_task.outputs[0].name + t1=bld.install_as('%s%s%s'%(path,os.sep,libname),node,env=self.env,chmod=self.link_task.chmod) + self.vnum_install_task=(t1,) + else: + t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) + t3=bld.symlink_as(path+os.sep+libname,name3) + if name2!=name3: + t2=bld.symlink_as(path+os.sep+name2,name3) + self.vnum_install_task=(t1,t2,t3) + else: + self.vnum_install_task=(t1,t3) + 
if'-dynamiclib'in self.env['LINKFLAGS']: + try: + inst_to=self.install_path + except AttributeError: + inst_to=self.link_task.__class__.inst_to + if inst_to: + p=Utils.subst_vars(inst_to,self.env) + path=os.path.join(p,name2) + self.env.append_value('LINKFLAGS',['-install_name',path]) + self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum) + self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum) +class vnum(Task.Task): + color='CYAN' + quient=True + ext_in=['.bin'] + def keyword(self): + return'Symlinking' + def run(self): + for x in self.outputs: + path=x.abspath() + try: + os.remove(path) + except OSError: + pass + try: + os.symlink(self.inputs[0].name,path) + except OSError: + return 1 +class fake_shlib(link_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +class fake_stlib(stlink_task): + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) +@conf +def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) +lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} +@feature('fake_lib') +def process_lib(self): + node=None + names=[x%self.name for x in lib_patterns[self.lib_type]] + for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: + if not isinstance(x,Node.Node): + x=self.bld.root.find_node(x)or 
self.path.find_node(x) + if not x: + continue + for y in names: + node=x.find_node(y) + if node: + node.sig=Utils.h_file(node.abspath()) + break + else: + continue + break + else: + raise Errors.WafError('could not find library %r'%self.name) + self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) + self.target=self.name +class fake_o(Task.Task): + def runnable_status(self): + return Task.SKIP_ME +@extension('.o','.obj') +def add_those_o_files(self,node): + tsk=self.create_task('fake_o',[],node) + try: + self.compiled_tasks.append(tsk) + except AttributeError: + self.compiled_tasks=[tsk] +@feature('fake_obj') +@before_method('process_source') +def process_objs(self): + for node in self.to_nodes(self.source): + self.add_those_o_files(node) + self.source=[] +@conf +def read_object(self,obj): + if not isinstance(obj,self.path.__class__): + obj=self.path.find_resource(obj) + return self(features='fake_obj',source=obj,name=obj.name) +@feature('cxxprogram','cprogram') +@after_method('apply_link','process_use') +def set_full_paths_hpux(self): + if self.env.DEST_OS!='hp-ux': + return + base=self.bld.bldnode.abspath() + for var in['LIBPATH','STLIBPATH']: + lst=[] + for x in self.env[var]: + if x.startswith('/'): + lst.append(x) + else: + lst.append(os.path.normpath(os.path.join(base,x))) + self.env[var]=lst diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py new file mode 100644 index 00000000..9379f5a4 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py @@ -0,0 +1,20 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar,gcc +from waflib.Configure import conf +@conf +def find_clang(conf): + cc=conf.find_program('clang',var='CC') + conf.get_cc_version(cc,clang=True) + conf.env.CC_NAME='clang' +def configure(conf): + conf.find_clang() + conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py new file mode 100644 index 00000000..fc97135e --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py @@ -0,0 +1,20 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar,gxx +from waflib.Configure import conf +@conf +def find_clangxx(conf): + cxx=conf.find_program('clang++',var='CXX') + conf.get_cc_version(cxx,clang=True) + conf.env.CXX_NAME='clang' +def configure(conf): + conf.find_clangxx() + conf.find_program(['llvm-ar','ar'],var='AR') + conf.find_ar() + conf.gxx_common_flags() + conf.gxx_modifier_platform() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py new file mode 100644 index 00000000..9d8f7f96 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py @@ -0,0 +1,40 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib.Tools import ccroot +from waflib import Utils +from waflib.Logs import debug +c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['gcc','clang'],} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=c_compiler.get(build_platform,c_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_c_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (C compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_c: %r'%e) + else: + if conf.env['CC']: + conf.end_msg(conf.env.get_flat('CC')) + conf.env['COMPILER_CC']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a C compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) + cc_compiler_opts=opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py new file mode 100644 index 00000000..67752294 --- /dev/null +++ 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py @@ -0,0 +1,40 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib.Tools import ccroot +from waflib import Utils +from waflib.Logs import debug +cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['g++','clang++']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_cxx_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (C++ compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + debug('compiler_cxx: %r'%e) + else: + if conf.env['CXX']: + conf.end_msg(conf.env.get_flat('CXX')) + conf.env['COMPILER_CXX']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a C++ compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('cxx_*.py') + cxx_compiler_opts=opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py new file mode 100644 index 00000000..3e13c0dc --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +d_compiler={'default':['gdc','dmd','ldc2']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=d_compiler.get(build_platform,d_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_d_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_d')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (D compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_d: %r'%e) + else: + if conf.env.D: + conf.end_msg(conf.env.get_flat('D')) + conf.env['COMPILER_D']=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a D compiler!') +def options(opt): + test_for_compiler=default_compilers() + d_compiler_opts=opt.add_option_group('Configuration options') + d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler') + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py new file mode 100644 index 00000000..b31780ab --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py @@ -0,0 +1,39 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +from waflib.Tools import fc +fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} +def default_compilers(): + build_platform=Utils.unversioned_sys_platform() + possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default']) + return' '.join(possible_compiler_list) +def configure(conf): + try:test_for_compiler=conf.options.check_fortran_compiler or default_compilers() + except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')") + for compiler in re.split('[ ,]+',test_for_compiler): + conf.env.stash() + conf.start_msg('Checking for %r (Fortran compiler)'%compiler) + try: + conf.load(compiler) + except conf.errors.ConfigurationError as e: + conf.env.revert() + conf.end_msg(False) + Logs.debug('compiler_fortran: %r'%e) + else: + if conf.env['FC']: + conf.end_msg(conf.env.get_flat('FC')) + conf.env.COMPILER_FORTRAN=compiler + break + conf.end_msg(False) + else: + conf.fatal('could not configure a Fortran compiler!') +def options(opt): + test_for_compiler=default_compilers() + opt.load_special_tools('fc_*.py') + fortran_compiler_opts=opt.add_option_group('Configuration options') + fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler") + for x in test_for_compiler.split(): + opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py new file mode 100644 index 00000000..fd023a40 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py @@ -0,0 +1,132 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Options,Errors +from waflib.TaskGen import before_method,after_method,feature +from waflib.Tools import ccroot +from waflib.Configure import conf +import os,tempfile +ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) +ccroot.lib_patterns['csshlib']=['%s'] +@feature('cs') +@before_method('process_source') +def apply_cs(self): + cs_nodes=[] + no_nodes=[] + for x in self.to_nodes(self.source): + if x.name.endswith('.cs'): + cs_nodes.append(x) + else: + no_nodes.append(x) + self.source=no_nodes + bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') + self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) + tsk.env.CSTYPE='/target:%s'%bintype + tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() + self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) + inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') + if inst_to: + mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) + self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod) +@feature('cs') +@after_method('apply_cs') +def use_cs(self): + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Errors.WafError: + self.env.append_value('CSFLAGS','/reference:%s'%x) + continue + y.post() + tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) + if not tsk: + self.bld.fatal('cs task has no link task for use %r'%self) + self.cs_task.dep_nodes.extend(tsk.outputs) + self.cs_task.set_run_after(tsk) + self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) +@feature('cs') +@after_method('apply_cs','use_cs') +def debug_cs(self): + csdebug=getattr(self,'csdebug',self.env.CSDEBUG) + if not csdebug: + return + node=self.cs_task.outputs[0] + if self.env.CS_NAME=='mono': + 
out=node.parent.find_or_declare(node.name+'.mdb') + else: + out=node.change_ext('.pdb') + self.cs_task.outputs.append(out) + try: + self.install_task.source.append(out) + except AttributeError: + pass + if csdebug=='pdbonly': + val=['/debug+','/debug:pdbonly'] + elif csdebug=='full': + val=['/debug+','/debug:full'] + else: + val=['/debug-'] + self.env.append_value('CSFLAGS',val) +class mcs(Task.Task): + color='YELLOW' + run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' + def exec_command(self,cmd,**kw): + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + try: + tmp=None + if isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret + def quote_response_command(self,flag): + if flag.lower()=='/noconfig': + return'' + if flag.find(' ')>-1: + for x in('/r:','/reference:','/resource:','/lib:','/out:'): + if flag.startswith(x): + flag='%s"%s"'%(x,'","'.join(flag[len(x):].split(','))) + break + else: + flag='"%s"'%flag + return flag +def configure(conf): + csc=getattr(Options.options,'cscbinary',None) + if csc: + conf.env.MCS=csc + conf.find_program(['csc','mcs','gmcs'],var='MCS') + conf.env.ASS_ST='/r:%s' + conf.env.RES_ST='/resource:%s' + conf.env.CS_NAME='csc' + if str(conf.env.MCS).lower().find('mcs')>-1: + conf.env.CS_NAME='mono' +def options(opt): + opt.add_option('--with-csc-binary',type='string',dest='cscbinary') +class fake_csshlib(Task.Task): + color='YELLOW' + inst_to=None + def runnable_status(self): + for x in self.outputs: + x.sig=Utils.h_file(x.abspath()) + return Task.SKIP_ME +@conf +def 
read_csshlib(self,name,paths=[]): + return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py new file mode 100644 index 00000000..6f039e93 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py @@ -0,0 +1,26 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import TaskGen,Task +from waflib.Tools import c_preproc +from waflib.Tools.ccroot import link_task,stlink_task +@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') +def cxx_hook(self,node): + return self.create_compiled_task('cxx',node) +if not'.c'in TaskGen.task_gen.mappings: + TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] +class cxx(Task.Task): + run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}' + vars=['CXXDEPS'] + ext_in=['.h'] + scan=c_preproc.scan +class cxxprogram(link_task): + run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' + vars=['LINKDEPS'] + ext_out=['.bin'] + inst_to='${BINDIR}' +class cxxshlib(cxxprogram): + inst_to='${LIBDIR}' +class cxxstlib(stlink_task): + pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py new file mode 100644 index 00000000..e8c98f00 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! 
Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Errors +from waflib.TaskGen import taskgen_method,feature,extension +from waflib.Tools import d_scan,d_config +from waflib.Tools.ccroot import link_task,stlink_task +class d(Task.Task): + color='GREEN' + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' + scan=d_scan.scan +class d_with_header(d): + run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' +class d_header(Task.Task): + color='BLUE' + run_str='${D} ${D_HEADER} ${SRC}' +class dprogram(link_task): + run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' + inst_to='${BINDIR}' +class dshlib(dprogram): + inst_to='${LIBDIR}' +class dstlib(stlink_task): + pass +@extension('.d','.di','.D') +def d_hook(self,node): + ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' + out='%s.%d.%s'%(node.name,self.idx,ext) + def create_compiled_task(self,name,node): + task=self.create_task(name,node,node.parent.find_or_declare(out)) + try: + self.compiled_tasks.append(task) + except AttributeError: + self.compiled_tasks=[task] + return task + if getattr(self,'generate_headers',None): + tsk=create_compiled_task(self,'d_with_header',node) + tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) + else: + tsk=create_compiled_task(self,'d',node) + return tsk +@taskgen_method +def generate_header(self,filename): + try: + self.header_lst.append([filename,self.install_path]) + except AttributeError: + self.header_lst=[[filename,self.install_path]] +@feature('d') +def process_header(self): + for i in getattr(self,'header_lst',[]): + node=self.path.find_resource(i[0]) + if not node: + raise Errors.WafError('file %r not found on d obj'%i[0]) + 
self.create_task('d_header',node,node.change_ext('.di')) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py new file mode 100644 index 00000000..71b7b6e1 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py @@ -0,0 +1,52 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils +from waflib.Configure import conf +@conf +def d_platform_flags(self): + v=self.env + if not v.DEST_OS: + v.DEST_OS=Utils.unversioned_sys_platform() + binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) + if binfmt=='pe': + v['dprogram_PATTERN']='%s.exe' + v['dshlib_PATTERN']='lib%s.dll' + v['dstlib_PATTERN']='lib%s.a' + elif binfmt=='mac-o': + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.dylib' + v['dstlib_PATTERN']='lib%s.a' + else: + v['dprogram_PATTERN']='%s' + v['dshlib_PATTERN']='lib%s.so' + v['dstlib_PATTERN']='lib%s.a' +DLIB=''' +version(D_Version2) { + import std.stdio; + int main() { + writefln("phobos2"); + return 0; + } +} else { + version(Tango) { + import tango.stdc.stdio; + int main() { + printf("tango"); + return 0; + } + } else { + import std.stdio; + int main() { + writefln("phobos1"); + return 0; + } + } +} +''' +@conf +def check_dlibrary(self,execute=True): + ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) + if execute: + self.env.DLIBRARY=ret.strip() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py new file mode 100644 index 00000000..47f91968 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py @@ -0,0 +1,133 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils,Logs +def filter_comments(filename): + txt=Utils.readf(filename) + i=0 + buf=[] + max=len(txt) + begin=0 + while i-1: + conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') + conf.load('ar') + conf.load('d') + conf.common_flags_dmd() + conf.d_platform_flags() + if str(conf.env.D).find('ldc')>-1: + conf.common_flags_ldc() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py new file mode 100644 index 00000000..421dfa6e --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py @@ -0,0 +1,163 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',} +meths_typos=['__call__','program','shlib','stlib','objects'] +import sys +from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils +import waflib.Tools.ccroot +def check_same_targets(self): + mp=Utils.defaultdict(list) + uids={} + def check_task(tsk): + if not isinstance(tsk,Task.Task): + return + for node in tsk.outputs: + mp[node].append(tsk) + try: + uids[tsk.uid()].append(tsk) + except KeyError: + uids[tsk.uid()]=[tsk] + for g in self.groups: + for tg in g: + try: + for tsk in tg.tasks: + check_task(tsk) + except AttributeError: + check_task(tg) + dupe=False + for(k,v)in mp.items(): + if len(v)>1: + dupe=True + msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") + Logs.error(msg) + for x in v: + if Logs.verbose>1: + Logs.error(' %d. %r'%(1+v.index(x),x.generator)) + else: + Logs.error(' %d. 
%r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None))) + if not dupe: + for(k,v)in uids.items(): + if len(v)>1: + Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') + for tsk in v: + Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator)) +def check_invalid_constraints(self): + feat=set([]) + for x in list(TaskGen.feats.values()): + feat.union(set(x)) + for(x,y)in TaskGen.task_gen.prec.items(): + feat.add(x) + feat.union(set(y)) + ext=set([]) + for x in TaskGen.task_gen.mappings.values(): + ext.add(x.__name__) + invalid=ext&feat + if invalid: + Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid)) + for cls in list(Task.classes.values()): + if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str): + raise Errors.WafError('Class %r has hcode value %r of type , expecting (use Utils.h_cmd() ?)'%(cls,cls.hcode)) + for x in('before','after'): + for y in Utils.to_list(getattr(cls,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__)) + if getattr(cls,'rule',None): + Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__) +def replace(m): + oldcall=getattr(Build.BuildContext,m) + def call(self,*k,**kw): + ret=oldcall(self,*k,**kw) + for x in typos: + if x in kw: + if x=='iscopy'and'subst'in getattr(self,'features',''): + continue + Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret)) + return ret + setattr(Build.BuildContext,m,call) +def enhance_lib(): + for m in meths_typos: + replace(m) + def ant_glob(self,*k,**kw): + if k: + lst=Utils.to_list(k[0]) + for pat in lst: + if'..'in pat.split('/'): + Logs.error("In ant_glob pattern %r: '..' 
means 'two dots', not 'parent directory'"%k[0]) + if kw.get('remove',True): + try: + if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False): + Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self) + except AttributeError: + pass + return self.old_ant_glob(*k,**kw) + Node.Node.old_ant_glob=Node.Node.ant_glob + Node.Node.ant_glob=ant_glob + old=Task.is_before + def is_before(t1,t2): + ret=old(t1,t2) + if ret and old(t2,t1): + Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2)) + return ret + Task.is_before=is_before + def check_err_features(self): + lst=self.to_list(self.features) + if'shlib'in lst: + Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') + for x in('c','cxx','d','fc'): + if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: + Logs.error('%r features is probably missing %r'%(self,x)) + TaskGen.feature('*')(check_err_features) + def check_err_order(self): + if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): + for x in('before','after','ext_in','ext_out'): + if hasattr(self,x): + Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self)) + else: + for x in('before','after'): + for y in self.to_list(getattr(self,x,[])): + if not Task.classes.get(y,None): + Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self)) + TaskGen.feature('*')(check_err_order) + def check_compile(self): + check_invalid_constraints(self) + try: + ret=self.orig_compile() + finally: + check_same_targets(self) + return ret + Build.BuildContext.orig_compile=Build.BuildContext.compile + Build.BuildContext.compile=check_compile + def use_rec(self,name,**kw): + try: + y=self.bld.get_tgen_by_name(name) + except Errors.WafError: + pass + else: + idx=self.bld.get_group_idx(self) + odx=self.bld.get_group_idx(y) + if odx>idx: + msg="Invalid 'use' across build groups:" + if Logs.verbose>1: + msg+='\n target 
%r\n uses:\n %r'%(self,y) + else: + msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) + raise Errors.WafError(msg) + self.orig_use_rec(name,**kw) + TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec + TaskGen.task_gen.use_rec=use_rec + def getattri(self,name,default=None): + if name=='append'or name=='add': + raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') + elif name=='prepend': + raise Errors.WafError('env.prepend does not exist: use env.prepend_value') + if name in self.__slots__: + return object.__getattr__(self,name,default) + else: + return self[name] + ConfigSet.ConfigSet.__getattr__=getattri +def options(opt): + enhance_lib() +def configure(conf): + pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py new file mode 100644 index 00000000..bc9f0b0b --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py @@ -0,0 +1,115 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib import Utils,Task,Logs +from waflib.Tools import ccroot,fc_config,fc_scan +from waflib.TaskGen import feature,extension +from waflib.Configure import conf +ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES']) +ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) +ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) +@feature('fcprogram','fcshlib','fcstlib','fcprogram_test') +def dummy(self): + pass +@extension('.f','.f90','.F','.F90','.for','.FOR') +def fc_hook(self,node): + return self.create_compiled_task('fc',node) +@conf +def modfile(conf,name): + return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] +def get_fortran_tasks(tsk): + bld=tsk.generator.bld + tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) + return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] +class fc(Task.Task): + color='GREEN' + run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' + vars=["FORTRANMODPATHFLAG"] + def scan(self): + tmp=fc_scan.fortran_parser(self.generator.includes_nodes) + tmp.task=self + tmp.start(self.inputs[0]) + if Logs.verbose: + Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names)) + return(tmp.nodes,tmp.names) + def runnable_status(self): + if getattr(self,'mod_fortran_done',None): + return super(fc,self).runnable_status() + bld=self.generator.bld + lst=get_fortran_tasks(self) + for tsk in lst: + tsk.mod_fortran_done=True + for tsk in lst: + ret=tsk.runnable_status() + if 
ret==Task.ASK_LATER: + for x in lst: + x.mod_fortran_done=None + return Task.ASK_LATER + ins=Utils.defaultdict(set) + outs=Utils.defaultdict(set) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('MOD@'): + name=bld.modfile(x.replace('MOD@','')) + node=bld.srcnode.find_or_declare(name) + if not getattr(node,'sig',None): + node.sig=Utils.SIG_NIL + tsk.set_outputs(node) + outs[id(node)].add(tsk) + for tsk in lst: + key=tsk.uid() + for x in bld.raw_deps[key]: + if x.startswith('USE@'): + name=bld.modfile(x.replace('USE@','')) + node=bld.srcnode.find_resource(name) + if node and node not in tsk.outputs: + if not node in bld.node_deps[key]: + bld.node_deps[key].append(node) + ins[id(node)].add(tsk) + for k in ins.keys(): + for a in ins[k]: + a.run_after.update(outs[k]) + tmp=[] + for t in outs[k]: + tmp.extend(t.outputs) + a.dep_nodes.extend(tmp) + a.dep_nodes.sort(key=lambda x:x.abspath()) + for tsk in lst: + try: + delattr(tsk,'cache_sig') + except AttributeError: + pass + return super(fc,self).runnable_status() +class fcprogram(ccroot.link_task): + color='YELLOW' + run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' + inst_to='${BINDIR}' +class fcshlib(fcprogram): + inst_to='${LIBDIR}' +class fcprogram_test(fcprogram): + def runnable_status(self): + ret=super(fcprogram_test,self).runnable_status() + if ret==Task.SKIP_ME: + ret=Task.RUN_ME + return ret + def exec_command(self,cmd,**kw): + bld=self.generator.bld + kw['shell']=isinstance(cmd,str) + kw['stdout']=kw['stderr']=Utils.subprocess.PIPE + kw['cwd']=bld.variant_dir + bld.out=bld.err='' + bld.to_log('command: %s\n'%cmd) + kw['output']=0 + try: + (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) + except Exception: + return-1 + if bld.out: + bld.to_log("out: %s\n"%bld.out) + if bld.err: + bld.to_log("err: 
%s\n"%bld.err) +class fcstlib(ccroot.stlink_task): + pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py new file mode 100644 index 00000000..58d8ccf5 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py @@ -0,0 +1,286 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,os,sys,shlex +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method +FC_FRAGMENT=' program main\n end program main\n' +FC_FRAGMENT2=' PROGRAM MAIN\n END\n' +@conf +def fc_flags(conf): + v=conf.env + v['FC_SRC_F']=[] + v['FC_TGT_F']=['-c','-o'] + v['FCINCPATH_ST']='-I%s' + v['FCDEFINES_ST']='-D%s' + if not v['LINK_FC']:v['LINK_FC']=v['FC'] + v['FCLNK_SRC_F']=[] + v['FCLNK_TGT_F']=['-o'] + v['FCFLAGS_fcshlib']=['-fpic'] + v['LINKFLAGS_fcshlib']=['-shared'] + v['fcshlib_PATTERN']='lib%s.so' + v['fcstlib_PATTERN']='lib%s.a' + v['FCLIB_ST']='-l%s' + v['FCLIBPATH_ST']='-L%s' + v['FCSTLIB_ST']='-l%s' + v['FCSTLIBPATH_ST']='-L%s' + v['FCSTLIB_MARKER']='-Wl,-Bstatic' + v['FCSHLIB_MARKER']='-Wl,-Bdynamic' + v['SONAME_ST']='-Wl,-h,%s' +@conf +def fc_add_flags(conf): + conf.add_os_flags('FCFLAGS',dup=False) + conf.add_os_flags('LINKFLAGS',dup=False) + conf.add_os_flags('LDFLAGS',dup=False) +@conf +def check_fortran(self,*k,**kw): + self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') +@conf +def check_fc(self,*k,**kw): + kw['compiler']='fc' + if not'compile_mode'in kw: + kw['compile_mode']='fc' + if not'type'in kw: + kw['type']='fcprogram' + if not'compile_filename'in kw: + kw['compile_filename']='test.f90' + if not'code'in kw: + kw['code']=FC_FRAGMENT + return self.check(*k,**kw) +@conf +def fortran_modifier_darwin(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + 
v['LINKFLAGS_fcshlib']=['-dynamiclib'] + v['fcshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']='-framework %s' + v['LINKFLAGS_fcstlib']=[] + v['FCSHLIB_MARKER']='' + v['FCSTLIB_MARKER']='' + v['SONAME_ST']='' +@conf +def fortran_modifier_win32(conf): + v=conf.env + v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe' + v['fcshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['FCFLAGS_fcshlib']=[] + v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT']) + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def fortran_modifier_cygwin(conf): + fortran_modifier_win32(conf) + v=conf.env + v['fcshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) + v['FCFLAGS_fcshlib']=[] +@conf +def check_fortran_dummy_main(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for check_fortran_dummy_main') + lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] + lst.extend([m.lower()for m in lst]) + lst.append('') + self.start_msg('Detecting whether we need a dummy main') + for main in lst: + kw['fortran_main']=main + try: + self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) + if not main: + self.env.FC_MAIN=-1 + self.end_msg('no') + else: + self.env.FC_MAIN=main + self.end_msg('yes %s'%main) + break + except self.errors.ConfigurationError: + pass + else: + self.end_msg('not found') + self.fatal('could not detect whether fortran requires a dummy main, see the config.log') +GCC_DRIVER_LINE=re.compile('^Driving:') +POSIX_STATIC_EXT=re.compile('\S+\.a') +POSIX_LIB_FLAGS=re.compile('-l\S+') +@conf +def is_link_verbose(self,txt): + assert isinstance(txt,str) + for line in txt.splitlines(): + if not GCC_DRIVER_LINE.search(line): + if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): + return True + return False +@conf +def check_fortran_verbose_flag(self,*k,**kw): + 
self.start_msg('fortran link verbose flag') + for x in('-v','--verbose','-verbose','-V'): + try: + self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) + except self.errors.ConfigurationError: + pass + else: + if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): + self.end_msg(x) + break + else: + self.end_msg('failure') + self.fatal('Could not obtain the fortran link verbose flag (see config.log)') + self.env.FC_VERBOSE_FLAG=x + return x +LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] +if os.name=='nt': + LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) +else: + LINKFLAGS_IGNORED.append(r'-lgcc*') +RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] +def _match_ignore(line): + for i in RLINKFLAGS_IGNORED: + if i.match(line): + return True + return False +def parse_fortran_link(lines): + final_flags=[] + for line in lines: + if not GCC_DRIVER_LINE.match(line): + _parse_flink_line(line,final_flags) + return final_flags +SPACE_OPTS=re.compile('^-[LRuYz]$') +NOSPACE_OPTS=re.compile('^-[RL]') +def _parse_flink_token(lexer,token,tmp_flags): + if _match_ignore(token): + pass + elif token.startswith('-lkernel32')and sys.platform=='cygwin': + tmp_flags.append(token) + elif SPACE_OPTS.match(token): + t=lexer.get_token() + if t.startswith('P,'): + t=t[2:] + for opt in t.split(os.pathsep): + tmp_flags.append('-L%s'%opt) + elif NOSPACE_OPTS.match(token): + tmp_flags.append(token) + elif POSIX_LIB_FLAGS.match(token): + tmp_flags.append(token) + else: + pass + t=lexer.get_token() + return t +def _parse_flink_line(line,final_flags): + lexer=shlex.shlex(line,posix=True) + lexer.whitespace_split=True + t=lexer.get_token() + tmp_flags=[] + while t: + t=_parse_flink_token(lexer,t,tmp_flags) + final_flags.extend(tmp_flags) + return 
final_flags +@conf +def check_fortran_clib(self,autoadd=True,*k,**kw): + if not self.env.FC_VERBOSE_FLAG: + self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') + self.start_msg('Getting fortran runtime link flags') + try: + self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) + except Exception: + self.end_msg(False) + if kw.get('mandatory',True): + conf.fatal('Could not find the c library flags') + else: + out=self.test_bld.err + flags=parse_fortran_link(out.splitlines()) + self.end_msg('ok (%s)'%' '.join(flags)) + self.env.LINKFLAGS_CLIB=flags + return flags + return[] +def getoutput(conf,cmd,stdin=False): + from waflib import Errors + if conf.env.env: + env=conf.env.env + else: + env=dict(os.environ) + env['LANG']='C' + input=stdin and'\n'.encode()or None + try: + out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input) + except Errors.WafError as e: + if not(hasattr(e,'stderr')and hasattr(e,'stdout')): + raise e + else: + out=e.stdout + err=e.stderr + except Exception: + conf.fatal('could not determine the compiler version %r'%cmd) + return(out,err) +ROUTINES_CODE="""\ + subroutine foobar() + return + end + subroutine foo_bar() + return + end +""" +MAIN_CODE=""" +void %(dummy_func_nounder)s(void); +void %(dummy_func_under)s(void); +int %(main_func_name)s() { + %(dummy_func_nounder)s(); + %(dummy_func_under)s(); + return 0; +} +""" +@feature('link_main_routines_func') +@before_method('process_source') +def link_main_routines_tg_method(self): + def write_test_file(task): + task.outputs[0].write(task.generator.code) + bld=self.bld + bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) + bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) + bld(features='fc fcstlib',source='test.f',target='test') + bld(features='c fcprogram',source='main.c',target='app',use='test') +def mangling_schemes(): + for u in('_',''): + for du in('','_'): + 
for c in("lower","upper"): + yield(u,du,c) +def mangle_name(u,du,c,name): + return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') +@conf +def check_fortran_mangling(self,*k,**kw): + if not self.env.CC: + self.fatal('A c compiler is required for link_main_routines') + if not self.env.FC: + self.fatal('A fortran compiler is required for link_main_routines') + if not self.env.FC_MAIN: + self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') + self.start_msg('Getting fortran mangling scheme') + for(u,du,c)in mangling_schemes(): + try: + self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN) + except self.errors.ConfigurationError: + pass + else: + self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) + self.env.FORTRAN_MANGLING=(u,du,c) + break + else: + self.end_msg(False) + self.fatal('mangler not found') + return(u,du,c) +@feature('pyext') +@before_method('propagate_uselib_vars','apply_link') +def set_lib_pat(self): + self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN'] +@conf +def detect_openmp(self): + for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): + try: + self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') + except self.errors.ConfigurationError: + pass + else: + break + else: + self.fatal('Could not find OpenMP') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py new file mode 100644 index 00000000..c07a22da --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py @@ -0,0 +1,64 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" +USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" +re_inc=re.compile(INC_REGEX,re.I) +re_use=re.compile(USE_REGEX,re.I) +re_mod=re.compile(MOD_REGEX,re.I) +class fortran_parser(object): + def __init__(self,incpaths): + self.seen=[] + self.nodes=[] + self.names=[] + self.incpaths=incpaths + def find_deps(self,node): + txt=node.read() + incs=[] + uses=[] + mods=[] + for line in txt.splitlines(): + m=re_inc.search(line) + if m: + incs.append(m.group(1)) + m=re_use.search(line) + if m: + uses.append(m.group(1)) + m=re_mod.search(line) + if m: + mods.append(m.group(1)) + return(incs,uses,mods) + def start(self,node): + self.waiting=[node] + while self.waiting: + nd=self.waiting.pop(0) + self.iter(nd) + def iter(self,node): + incs,uses,mods=self.find_deps(node) + for x in incs: + if x in self.seen: + continue + self.seen.append(x) + self.tryfind_header(x) + for x in uses: + name="USE@%s"%x + if not name in self.names: + self.names.append(name) + for x in mods: + name="MOD@%s"%x + if not name in self.names: + self.names.append(name) + def tryfind_header(self,filename): + found=None + for n in self.incpaths: + found=n.find_resource(filename) + if found: + self.nodes.append(found) + self.waiting.append(found) + break + if not found: + if not filename in self.names: + self.names.append(filename) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py new file mode 100644 index 00000000..7a04074e --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py @@ -0,0 +1,32 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import waflib.TaskGen,os,re +def decide_ext(self,node): + if'cxx'in self.features: + return['.lex.cc'] + return['.lex.c'] +def flexfun(tsk): + env=tsk.env + bld=tsk.generator.bld + wd=bld.variant_dir + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + tsk.last_cmd=lst=[] + lst.extend(to_list(env['FLEX'])) + lst.extend(to_list(env['FLEXFLAGS'])) + inputs=[a.path_from(bld.bldnode)for a in tsk.inputs] + if env.FLEX_MSYS: + inputs=[x.replace(os.sep,'/')for x in inputs] + lst.extend(inputs) + lst=[x for x in lst if x] + txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) +waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) +def configure(conf): + conf.find_program('flex',var='FLEX') + conf.env.FLEXFLAGS=['-t'] + if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]): + conf.env.FLEX_MSYS=True diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py new file mode 100644 index 00000000..6524e1c0 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py @@ -0,0 +1,54 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_g95(conf): + fc=conf.find_program('g95',var='FC') + conf.get_g95_version(fc) + conf.env.FC_NAME='G95' +@conf +def g95_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-fmod=',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def g95_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def g95_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def g95_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def g95_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) + if g95_modifier_func: + g95_modifier_func() +@conf +def get_g95_version(conf,fc): + version_re=re.compile(r"g95\s*(?P\d*)\.(?P\d*)").search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out: + match=version_re(out) + else: + match=version_re(err) + if not match: + conf.fatal('cannot determine g95 version') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_g95() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.g95_flags() + conf.g95_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py new file mode 100644 index 00000000..4817c231 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py @@ -0,0 +1,12 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import waflib.Tools.asm +from waflib.Tools import ar +def configure(conf): + conf.find_program(['gas','gcc'],var='AS') + conf.env.AS_TGT_F=['-c','-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.find_ar() + conf.load('asm') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py new file mode 100644 index 00000000..a3c7720f --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py @@ -0,0 +1,102 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_gcc(conf): + cc=conf.find_program(['gcc','cc'],var='CC') + conf.get_cc_version(cc,gcc=True) + conf.env.CC_NAME='gcc' +@conf +def gcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Wl,-Bdynamic' + v['STLIB_MARKER']='-Wl,-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-shared'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Wl,-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' + v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] + v['CFLAGS_MACBUNDLE']=['-fPIC'] + v['macbundle_PATTERN']='%s.bundle' +@conf +def gcc_modifier_win32(conf): + v=conf.env + v['cprogram_PATTERN']='%s.exe' + v['cshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='lib%s.dll.a' + v['IMPLIB_ST']='-Wl,--out-implib,%s' + v['CFLAGS_cshlib']=[] + v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) +@conf +def 
gcc_modifier_cygwin(conf): + gcc_modifier_win32(conf) + v=conf.env + v['cshlib_PATTERN']='cyg%s.dll' + v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) + v['CFLAGS_cshlib']=[] +@conf +def gcc_modifier_darwin(conf): + v=conf.env + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-dynamiclib'] + v['cshlib_PATTERN']='lib%s.dylib' + v['FRAMEWORKPATH_ST']='-F%s' + v['FRAMEWORK_ST']=['-framework'] + v['ARCH_ST']=['-arch'] + v['LINKFLAGS_cstlib']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_aix(conf): + v=conf.env + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull'] + v['SHLIB_MARKER']=[] +@conf +def gcc_modifier_hpux(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['CFLAGS_cshlib']=['-fPIC','-DPIC'] + v['cshlib_PATTERN']='lib%s.sl' +@conf +def gcc_modifier_openbsd(conf): + conf.env.SONAME_ST=[] +@conf +def gcc_modifier_osf1V(conf): + v=conf.env + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['SONAME_ST']=[] +@conf +def gcc_modifier_platform(conf): + gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) + if gcc_modifier_func: + gcc_modifier_func() +def configure(conf): + conf.find_gcc() + conf.find_ar() + conf.gcc_common_flags() + conf.gcc_modifier_platform() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py new file mode 100644 index 00000000..acfea4a2 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py @@ -0,0 +1,35 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_gdc(conf): + conf.find_program('gdc',var='D') + out=conf.cmd_and_log(conf.env.D+['--version']) + if out.find("gdc")==-1: + conf.fatal("detected compiler is not gdc") +@conf +def common_flags_gdc(conf): + v=conf.env + v['DFLAGS']=[] + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-o%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-o%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s' + v['LINKFLAGS_dshlib']=['-shared'] + v['DHEADER_ext']='.di' + v.DFLAGS_d_with_header='-fintfc' + v['D_HDR_F']='-fintfc-file=%s' +def configure(conf): + conf.find_gdc() + conf.load('ar') + conf.load('d') + conf.common_flags_gdc() + conf.d_platform_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py new file mode 100644 index 00000000..a0ea00b6 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py @@ -0,0 +1,68 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Utils +from waflib.Tools import fc,fc_config,fc_scan,ar +from waflib.Configure import conf +@conf +def find_gfortran(conf): + fc=conf.find_program(['gfortran','g77'],var='FC') + conf.get_gfortran_version(fc) + conf.env.FC_NAME='GFORTRAN' +@conf +def gfortran_flags(conf): + v=conf.env + v['FCFLAGS_fcshlib']=['-fPIC'] + v['FORTRANMODFLAG']=['-J',''] + v['FCFLAGS_DEBUG']=['-Werror'] +@conf +def gfortran_modifier_win32(conf): + fc_config.fortran_modifier_win32(conf) +@conf +def gfortran_modifier_cygwin(conf): + fc_config.fortran_modifier_cygwin(conf) +@conf +def gfortran_modifier_darwin(conf): + fc_config.fortran_modifier_darwin(conf) +@conf +def gfortran_modifier_platform(conf): + dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() + gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) + if gfortran_modifier_func: + gfortran_modifier_func() +@conf +def get_gfortran_version(conf,fc): + version_re=re.compile(r"GNU\s*Fortran",re.I).search + cmd=fc+['--version'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + if out:match=version_re(out) + else:match=version_re(err) + if not match: + conf.fatal('Could not determine the compiler type') + cmd=fc+['-dM','-E','-'] + out,err=fc_config.getoutput(conf,cmd,stdin=True) + if out.find('__GNUC__')<0: + conf.fatal('Could not determine the compiler type') + k={} + out=out.splitlines() + import shlex + for line in out: + lst=shlex.split(line) + if len(lst)>2: + key=lst[1] + val=lst[2] + k[key]=val + def isD(var): + return var in k + def isT(var): + return var in k and k[var]!='0' + conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) +def configure(conf): + conf.find_gfortran() + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + conf.gfortran_flags() + conf.gfortran_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py new file mode 100644 index 00000000..47ee8235 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py @@ -0,0 +1,234 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Context,Task,Utils,Options,Errors,Logs +from waflib.TaskGen import taskgen_method,before_method,feature,extension +from waflib.Configure import conf +@taskgen_method +def add_marshal_file(self,filename,prefix): + if not hasattr(self,'marshal_list'): + self.marshal_list=[] + self.meths.append('process_marshal') + self.marshal_list.append((filename,prefix)) +@before_method('process_source') +def process_marshal(self): + for f,prefix in getattr(self,'marshal_list',[]): + node=self.path.find_resource(f) + if not node: + raise Errors.WafError('file not found %r'%f) + h_node=node.change_ext('.h') + c_node=node.change_ext('.c') + task=self.create_task('glib_genmarshal',node,[h_node,c_node]) + task.env.GLIB_GENMARSHAL_PREFIX=prefix + self.source=self.to_nodes(getattr(self,'source',[])) + self.source.append(c_node) +class glib_genmarshal(Task.Task): + def run(self): + bld=self.inputs[0].__class__.ctx + get=self.env.get_flat + cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) + ret=bld.exec_command(cmd1) + if ret:return ret + c='''#include "%s"\n'''%self.outputs[0].name + self.outputs[1].write(c) + cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) + return bld.exec_command(cmd2) + vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] + color='BLUE' + ext_out=['.h'] +@taskgen_method +def add_enums_from_template(self,source='',target='',template='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + 
self.meths.append('process_enums') + self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) +@taskgen_method +def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): + if not hasattr(self,'enums_list'): + self.enums_list=[] + self.meths.append('process_enums') + self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) +@before_method('process_source') +def process_enums(self): + for enum in getattr(self,'enums_list',[]): + task=self.create_task('glib_mkenums') + env=task.env + inputs=[] + source_list=self.to_list(enum['source']) + if not source_list: + raise Errors.WafError('missing source '+str(enum)) + source_list=[self.path.find_resource(k)for k in source_list] + inputs+=source_list + env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + if not enum['target']: + raise Errors.WafError('missing target '+str(enum)) + tgt_node=self.path.find_or_declare(enum['target']) + if tgt_node.name.endswith('.c'): + self.source.append(tgt_node) + env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + options=[] + if enum['template']: + template_node=self.path.find_resource(enum['template']) + options.append('--template %s'%(template_node.abspath())) + inputs.append(template_node) + params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} + for param,option in params.items(): + if enum[param]: + options.append('%s %r'%(option,enum[param])) + env['GLIB_MKENUMS_OPTIONS']=' '.join(options) + task.set_inputs(inputs) + 
task.set_outputs(tgt_node) +class glib_mkenums(Task.Task): + run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' + color='PINK' + ext_out=['.h'] +@taskgen_method +def add_settings_schemas(self,filename_list): + if not hasattr(self,'settings_schema_files'): + self.settings_schema_files=[] + if not isinstance(filename_list,list): + filename_list=[filename_list] + self.settings_schema_files.extend(filename_list) +@taskgen_method +def add_settings_enums(self,namespace,filename_list): + if hasattr(self,'settings_enum_namespace'): + raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name) + self.settings_enum_namespace=namespace + if type(filename_list)!='list': + filename_list=[filename_list] + self.settings_enum_files=filename_list +@feature('glib2') +def process_settings(self): + enums_tgt_node=[] + install_files=[] + settings_schema_files=getattr(self,'settings_schema_files',[]) + if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: + raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") + if hasattr(self,'settings_enum_files'): + enums_task=self.create_task('glib_mkenums') + source_list=self.settings_enum_files + source_list=[self.path.find_resource(k)for k in source_list] + enums_task.set_inputs(source_list) + enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] + target=self.settings_enum_namespace+'.enums.xml' + tgt_node=self.path.find_or_declare(target) + enums_task.set_outputs(tgt_node) + enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() + enums_tgt_node=[tgt_node] + install_files.append(tgt_node) + options='--comments "" --fhead "" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " " --vtail " " --ftail "" '%(self.settings_enum_namespace) + enums_task.env['GLIB_MKENUMS_OPTIONS']=options + for schema in settings_schema_files: + schema_task=self.create_task('glib_validate_schema') + 
schema_node=self.path.find_resource(schema) + if not schema_node: + raise Errors.WafError("Cannot find the schema file '%s'"%schema) + install_files.append(schema_node) + source_list=enums_tgt_node+[schema_node] + schema_task.set_inputs(source_list) + schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list] + target_node=schema_node.change_ext('.xml.valid') + schema_task.set_outputs(target_node) + schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath() + def compile_schemas_callback(bld): + if not bld.is_install:return + Logs.pprint('YELLOW','Updating GSettings schema cache') + command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env) + self.bld.exec_command(command) + if self.bld.is_install: + if not self.env['GSETTINGSSCHEMADIR']: + raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') + if install_files: + self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files) + if not hasattr(self.bld,'_compile_schemas_registered'): + self.bld.add_post_fun(compile_schemas_callback) + self.bld._compile_schemas_registered=True +class glib_validate_schema(Task.Task): + run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' + color='PINK' +@extension('.gresource.xml') +def process_gresource_source(self,node): + if not self.env['GLIB_COMPILE_RESOURCES']: + raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure") + if'gresource'in self.features: + return + h_node=node.change_ext('_xml.h') + c_node=node.change_ext('_xml.c') + self.create_task('glib_gresource_source',node,[h_node,c_node]) + self.source.append(c_node) +@feature('gresource') +def process_gresource_bundle(self): + for i in self.to_list(self.source): + node=self.path.find_resource(i) + 
task=self.create_task('glib_gresource_bundle',node,node.change_ext('')) + inst_to=getattr(self,'install_path',None) + if inst_to: + self.bld.install_files(inst_to,task.outputs) +class glib_gresource_base(Task.Task): + color='BLUE' + base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' + def scan(self): + bld=self.generator.bld + kw={} + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + kw['quiet']=Context.BOTH + cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env) + output=bld.cmd_and_log(cmd,**kw) + nodes=[] + names=[] + for dep in output.splitlines(): + if dep: + node=bld.bldnode.find_node(dep) + if node: + nodes.append(node) + else: + names.append(dep) + return(nodes,names) +class glib_gresource_source(glib_gresource_base): + vars=['GLIB_COMPILE_RESOURCES'] + fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}') + fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}') + ext_out=['.h'] + def run(self): + return self.fun_h[0](self)or self.fun_c[0](self) +class glib_gresource_bundle(glib_gresource_base): + run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}' + shell=True +@conf +def find_glib_genmarshal(conf): + conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') +@conf +def find_glib_mkenums(conf): + if not conf.env.PERL: + conf.find_program('perl',var='PERL') + conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS') +@conf +def find_glib_compile_schemas(conf): + conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS') + def getstr(varname): + return getattr(Options.options,varname,getattr(conf.env,varname,'')) + 
gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') + if not gsettingsschemadir: + datadir=getstr('DATADIR') + if not datadir: + prefix=conf.env['PREFIX'] + datadir=os.path.join(prefix,'share') + gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') + conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir +@conf +def find_glib_compile_resources(conf): + conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES') +def configure(conf): + conf.find_glib_genmarshal() + conf.find_glib_mkenums() + conf.find_glib_compile_schemas(mandatory=False) + conf.find_glib_compile_resources(mandatory=False) +def options(opt): + gr=opt.add_option_group('Installation directories') + gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py new file mode 100644 index 00000000..21a62885 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py @@ -0,0 +1,66 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Options,Context +gnuopts=''' +bindir, user commands, ${EXEC_PREFIX}/bin +sbindir, system binaries, ${EXEC_PREFIX}/sbin +libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec +sysconfdir, host-specific configuration, ${PREFIX}/etc +sharedstatedir, architecture-independent variable data, ${PREFIX}/com +localstatedir, variable data, ${PREFIX}/var +libdir, object code libraries, ${EXEC_PREFIX}/lib%s +includedir, header files, ${PREFIX}/include +oldincludedir, header files for non-GCC compilers, /usr/include +datarootdir, architecture-independent data root, ${PREFIX}/share +datadir, architecture-independent data, ${DATAROOTDIR} +infodir, GNU "info" documentation, ${DATAROOTDIR}/info +localedir, locale-dependent data, ${DATAROOTDIR}/locale +mandir, manual pages, ${DATAROOTDIR}/man +docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} +htmldir, HTML documentation, ${DOCDIR} +dvidir, DVI documentation, ${DOCDIR} +pdfdir, PDF documentation, ${DOCDIR} +psdir, PostScript documentation, ${DOCDIR} +'''%Utils.lib64() +_options=[x.split(', ')for x in gnuopts.splitlines()if x] +def configure(conf): + def get_param(varname,default): + return getattr(Options.options,varname,'')or default + env=conf.env + env.LIBDIR=env.BINDIR=[] + env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) + env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE + complete=False + iter=0 + while not complete and iter\d*)\.(?P\d*)",re.I).search + if Utils.is_win32: + cmd=fc + else: + cmd=fc+['-logo'] + out,err=fc_config.getoutput(conf,cmd,stdin=False) + match=version_re(out)or version_re(err) + if not match: + conf.fatal('cannot determine ifort version.') + k=match.groupdict() + conf.env['FC_VERSION']=(k['major'],k['minor']) +def configure(conf): + conf.find_ifort() + conf.find_program('xiar',var='AR') + conf.find_ar() + conf.fc_flags() + conf.fc_add_flags() + 
conf.ifort_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py new file mode 100644 index 00000000..c751e266 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py @@ -0,0 +1,97 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Context,Task,Utils,Logs +import waflib.Tools.ccroot +from waflib.TaskGen import feature,before_method,taskgen_method +from waflib.Logs import error +from waflib.Configure import conf +_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',} +@taskgen_method +def ensure_localedir(self): + if not self.env.LOCALEDIR: + if self.env.DATAROOTDIR: + self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale') + else: + self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale') +@before_method('process_source') +@feature('intltool_in') +def apply_intltool_in_f(self): + try:self.meths.remove('process_source') + except ValueError:pass + self.ensure_localedir() + podir=getattr(self,'podir','.') + podirnode=self.path.find_dir(podir) + if not podirnode: + error("could not find the podir %r"%podir) + return + cache=getattr(self,'intlcache','.intlcache') + self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)] + self.env.INTLPODIR=podirnode.bldpath() + self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT)) + if'-c'in self.env.INTLFLAGS: + self.bld.fatal('Redundant -c flag in intltool task %r'%self) + style=getattr(self,'style',None) + if style: + try: + style_flag=_style_flags[style] + except KeyError: + self.bld.fatal('intltool_in style "%s" is not valid'%style) + self.env.append_unique('INTLFLAGS',[style_flag]) + for i in 
self.to_list(self.source): + node=self.path.find_resource(i) + task=self.create_task('intltool',node,node.change_ext('')) + inst=getattr(self,'install_path',None) + if inst: + self.bld.install_files(inst,task.outputs) +@feature('intltool_po') +def apply_intltool_po(self): + try:self.meths.remove('process_source') + except ValueError:pass + self.ensure_localedir() + appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name')) + podir=getattr(self,'podir','.') + inst=getattr(self,'install_path','${LOCALEDIR}') + linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) + if linguas: + file=open(linguas.abspath()) + langs=[] + for line in file.readlines(): + if not line.startswith('#'): + langs+=line.split() + file.close() + re_linguas=re.compile('[-a-zA-Z_@.]+') + for lang in langs: + if re_linguas.match(lang): + node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) + task=self.create_task('po',node,node.change_ext('.mo')) + if inst: + filename=task.outputs[0].name + (langname,ext)=os.path.splitext(filename) + inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' + self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env) + else: + Logs.pprint('RED',"Error no LINGUAS file found in po directory") +class po(Task.Task): + run_str='${MSGFMT} -o ${TGT} ${SRC}' + color='BLUE' +class intltool(Task.Task): + run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' + color='BLUE' +@conf +def find_msgfmt(conf): + conf.find_program('msgfmt',var='MSGFMT') +@conf +def find_intltool_merge(conf): + if not conf.env.PERL: + conf.find_program('perl',var='PERL') + conf.env.INTLCACHE_ST='--cache=%s' + conf.env.INTLFLAGS_DEFAULT=['-q','-u'] + conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL') +def configure(conf): + conf.find_msgfmt() + conf.find_intltool_merge() + if conf.env.CC or conf.env.CXX: + 
conf.check(header_name='locale.h') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py new file mode 100644 index 00000000..74a36cf9 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py @@ -0,0 +1,45 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_irixcc(conf): + v=conf.env + cc=None + if v['CC']:cc=v['CC'] + elif'CC'in conf.environ:cc=conf.environ['CC'] + if not cc:cc=conf.find_program('cc',var='CC') + if not cc:conf.fatal('irixcc was not found') + try: + conf.cmd_and_log(cc+['-version']) + except Exception: + conf.fatal('%r -version could not be executed'%cc) + v['CC']=cc + v['CC_NAME']='irix' +@conf +def irixcc_common_flags(conf): + v=conf.env + v['CC_SRC_F']='' + v['CC_TGT_F']=['-c','-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['cprogram_PATTERN']='%s' + v['cshlib_PATTERN']='lib%s.so' + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_irixcc() + conf.find_cpp() + conf.find_ar() + conf.irixcc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py new file mode 100644 index 00000000..9009a7ae --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py @@ -0,0 +1,305 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,tempfile,shutil +from waflib import Task,Utils,Errors,Node,Logs +from waflib.Configure import conf +from waflib.TaskGen import feature,before_method,after_method +from waflib.Tools import ccroot +ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) +SOURCE_RE='**/*.java' +JAR_RE='**/*' +class_check_source=''' +public class Test { + public static void main(String[] argv) { + Class lib; + if (argv.length < 1) { + System.err.println("Missing argument"); + System.exit(77); + } + try { + lib = Class.forName(argv[0]); + } catch (ClassNotFoundException e) { + System.err.println("ClassNotFoundException"); + System.exit(1); + } + lib = null; + System.exit(0); + } +} +''' +@feature('javac') +@before_method('process_source') +def apply_java(self): + Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) + outdir=getattr(self,'outdir',None) + if outdir: + if not isinstance(outdir,Node.Node): + outdir=self.path.get_bld().make_node(self.outdir) + else: + outdir=self.path.get_bld() + outdir.mkdir() + self.outdir=outdir + self.env['OUTDIR']=outdir.abspath() + self.javac_task=tsk=self.create_task('javac') + tmp=[] + srcdir=getattr(self,'srcdir','') + if isinstance(srcdir,Node.Node): + srcdir=[srcdir] + for x in Utils.to_list(srcdir): + if isinstance(x,Node.Node): + y=x + else: + y=self.path.find_dir(x) + if not y: + self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) + tmp.append(y) + tsk.srcdir=tmp + if getattr(self,'compat',None): + tsk.env.append_value('JAVACFLAGS',['-source',self.compat]) + if hasattr(self,'sourcepath'): + fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] + names=os.pathsep.join([x.srcpath()for x in fold]) + else: + names=[x.srcpath()for x in tsk.srcdir] + if names: + tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) +@feature('javac') 
+@after_method('apply_java') +def use_javac_files(self): + lst=[] + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + lst.append(y.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(y.jar_task) + if lst: + self.env.append_value('CLASSPATH',lst) +@feature('javac') +@after_method('apply_java','propagate_uselib_vars','use_javac_files') +def set_classpath(self): + self.env.append_value('CLASSPATH',getattr(self,'classpath',[])) + for x in self.tasks: + x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep +@feature('jar') +@after_method('apply_java','use_javac_files') +@before_method('process_source') +def jar_files(self): + destfile=getattr(self,'destfile','test.jar') + jaropts=getattr(self,'jaropts',[]) + manifest=getattr(self,'manifest',None) + basedir=getattr(self,'basedir',None) + if basedir: + if not isinstance(self.basedir,Node.Node): + basedir=self.path.get_bld().make_node(basedir) + else: + basedir=self.path.get_bld() + if not basedir: + self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) + self.jar_task=tsk=self.create_task('jar_create') + if manifest: + jarcreate=getattr(self,'jarcreate','cfm') + node=self.path.find_node(manifest) + tsk.dep_nodes.append(node) + jaropts.insert(0,node.abspath()) + else: + jarcreate=getattr(self,'jarcreate','cf') + if not isinstance(destfile,Node.Node): + destfile=self.path.find_or_declare(destfile) + if not destfile: + self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) + tsk.set_outputs(destfile) + tsk.basedir=basedir + jaropts.append('-C') + jaropts.append(basedir.bldpath()) + jaropts.append('.') + tsk.env['JAROPTS']=jaropts + tsk.env['JARCREATE']=jarcreate + if getattr(self,'javac_task',None): + tsk.set_run_after(self.javac_task) +@feature('jar') +@after_method('jar_files') +def 
use_jar_files(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + names=self.to_list(getattr(self,'use',[])) + get=self.bld.get_tgen_by_name + for x in names: + try: + y=get(x) + except Exception: + self.uselib.append(x) + else: + y.post() + self.jar_task.run_after.update(y.tasks) +class jar_create(Task.Task): + color='GREEN' + run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global JAR_RE + try: + self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] + except Exception: + raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) + return super(jar_create,self).runnable_status() +class javac(Task.Task): + color='BLUE' + vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] + def runnable_status(self): + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + if not self.inputs: + global SOURCE_RE + self.inputs=[] + for x in self.srcdir: + self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) + return super(javac,self).runnable_status() + def run(self): + env=self.env + gen=self.generator + bld=gen.bld + wd=bld.bldnode.abspath() + def to_list(xx): + if isinstance(xx,str):return[xx] + return xx + cmd=[] + cmd.extend(to_list(env['JAVAC'])) + cmd.extend(['-classpath']) + cmd.extend(to_list(env['CLASSPATH'])) + cmd.extend(['-d']) + cmd.extend(to_list(env['OUTDIR'])) + cmd.extend(to_list(env['JAVACFLAGS'])) + files=[a.path_from(bld.bldnode)for a in self.inputs] + tmp=None + try: + if len(str(files))+len(str(cmd))>8192: + (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath()) + try: + os.write(fd,'\n'.join(files).encode()) + finally: + if tmp: + os.close(fd) + if Logs.verbose: + Logs.debug('runner: %r'%(cmd+files)) + cmd.append('@'+tmp) + else: + cmd+=files + ret=self.exec_command(cmd,cwd=wd,env=env.env or None) + finally: + if tmp: + os.remove(tmp) + return ret + def 
post_run(self): + for n in self.generator.outdir.ant_glob('**/*.class'): + n.sig=Utils.h_file(n.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +@feature('javadoc') +@after_method('process_rule') +def create_javadoc(self): + tsk=self.create_task('javadoc') + tsk.classpath=getattr(self,'classpath',[]) + self.javadoc_package=Utils.to_list(self.javadoc_package) + if not isinstance(self.javadoc_output,Node.Node): + self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) +class javadoc(Task.Task): + color='BLUE' + def __str__(self): + return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) + def run(self): + env=self.env + bld=self.generator.bld + wd=bld.bldnode.abspath() + srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir + srcpath+=os.pathsep + srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir + classpath=env.CLASSPATH + classpath+=os.pathsep + classpath+=os.pathsep.join(self.classpath) + classpath="".join(classpath) + self.last_cmd=lst=[] + lst.extend(Utils.to_list(env['JAVADOC'])) + lst.extend(['-d',self.generator.javadoc_output.abspath()]) + lst.extend(['-sourcepath',srcpath]) + lst.extend(['-classpath',classpath]) + lst.extend(['-subpackages']) + lst.extend(self.generator.javadoc_package) + lst=[x for x in lst if x] + self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) + def post_run(self): + nodes=self.generator.javadoc_output.ant_glob('**') + for x in nodes: + x.sig=Utils.h_file(x.abspath()) + self.generator.bld.task_sigs[self.uid()]=self.cache_sig +def configure(self): + java_path=self.environ['PATH'].split(os.pathsep) + v=self.env + if'JAVA_HOME'in self.environ: + java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path + self.env['JAVA_HOME']=[self.environ['JAVA_HOME']] + for x in'javac java jar javadoc'.split(): + self.find_program(x,var=x.upper(),path_list=java_path) + if'CLASSPATH'in self.environ: + 
v['CLASSPATH']=self.environ['CLASSPATH'] + if not v['JAR']:self.fatal('jar is required for making java packages') + if not v['JAVAC']:self.fatal('javac is required for compiling java classes') + v['JARCREATE']='cf' + v['JAVACFLAGS']=[] +@conf +def check_java_class(self,classname,with_classpath=None): + javatestdir='.waf-javatest' + classpath=javatestdir + if self.env['CLASSPATH']: + classpath+=os.pathsep+self.env['CLASSPATH'] + if isinstance(with_classpath,str): + classpath+=os.pathsep+with_classpath + shutil.rmtree(javatestdir,True) + os.mkdir(javatestdir) + Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) + self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) + cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] + self.to_log("%s\n"%str(cmd)) + found=self.exec_command(cmd,shell=False) + self.msg('Checking for java class %s'%classname,not found) + shutil.rmtree(javatestdir,True) + return found +@conf +def check_jni_headers(conf): + if not conf.env.CC_NAME and not conf.env.CXX_NAME: + conf.fatal('load a compiler first (gcc, g++, ..)') + if not conf.env.JAVA_HOME: + conf.fatal('set JAVA_HOME in the system environment') + javaHome=conf.env['JAVA_HOME'][0] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') + if dir is None: + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') + if dir is None: + conf.fatal('JAVA_HOME does not seem to be set properly') + f=dir.ant_glob('**/(jni|jni_md).h') + incDirs=[x.parent.abspath()for x in f] + dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) + f=dir.ant_glob('**/*jvm.(so|dll|dylib)') + libDirs=[x.parent.abspath()for x in f]or[javaHome] + f=dir.ant_glob('**/*jvm.(lib)') + if f: + libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] + for d in libDirs: + try: + conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') + except Exception: + pass + else: + break + else: + 
conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py new file mode 100644 index 00000000..3e90377a --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py @@ -0,0 +1,48 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Task,Utils +from waflib.TaskGen import feature +@feature('msgfmt') +def apply_msgfmt(self): + for lang in self.to_list(self.langs): + node=self.path.find_resource(lang+'.po') + task=self.create_task('msgfmt',node,node.change_ext('.mo')) + langname=lang.split('/') + langname=langname[-1] + inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}') + self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) +class msgfmt(Task.Task): + color='BLUE' + run_str='${MSGFMT} ${SRC} -o ${TGT}' +def configure(self): + kdeconfig=self.find_program('kde4-config') + prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip() + fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError: + fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix + try:os.stat(fname) + except OSError:self.fatal('could not open %s'%fname) + try: + txt=Utils.readf(fname) + except EnvironmentError: + self.fatal('could not read %s'%fname) + txt=txt.replace('\\\n','\n') + fu=re.compile('#(.*)\n') + txt=fu.sub('',txt) + setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') + found=setregexp.findall(txt) + for(_,key,val)in found: + self.env[key]=val + self.env['LIB_KDECORE']=['kdecore'] + self.env['LIB_KDEUI']=['kdeui'] + self.env['LIB_KIO']=['kio'] + self.env['LIB_KHTML']=['khtml'] + 
self.env['LIB_KPARTS']=['kparts'] + self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR] + self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']] + self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE']) + self.find_program('msgfmt',var='MSGFMT') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py new file mode 100644 index 00000000..75162e4d --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py @@ -0,0 +1,36 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ar,d +from waflib.Configure import conf +@conf +def find_ldc2(conf): + conf.find_program(['ldc2'],var='D') + out=conf.cmd_and_log(conf.env.D+['-version']) + if out.find("based on DMD v2.")==-1: + conf.fatal("detected compiler is not ldc2") +@conf +def common_flags_ldc2(conf): + v=conf.env + v['D_SRC_F']=['-c'] + v['D_TGT_F']='-of%s' + v['D_LINKER']=v['D'] + v['DLNK_SRC_F']='' + v['DLNK_TGT_F']='-of%s' + v['DINC_ST']='-I%s' + v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' + v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' + v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' + v['LINKFLAGS_dshlib']=['-L-shared'] + v['DHEADER_ext']='.di' + v['DFLAGS_d_with_header']=['-H','-Hf'] + v['D_HDR_F']='%s' + v['LINKFLAGS']=[] + v['DFLAGS_dshlib']=['-relocation-model=pic'] +def configure(conf): + conf.find_ldc2() + conf.load('ar') + conf.load('d') + conf.common_flags_ldc2() + conf.d_platform_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py new file mode 100644 index 00000000..b801d5fa --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py @@ -0,0 +1,18 @@ +#! 
/usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.TaskGen import extension +from waflib import Task +@extension('.lua') +def add_lua(self,node): + tsk=self.create_task('luac',node,node.change_ext('.luac')) + inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) + if inst_to: + self.bld.install_files(inst_to,tsk.outputs) + return tsk +class luac(Task.Task): + run_str='${LUAC} -s -o ${TGT} ${SRC}' + color='PINK' +def configure(conf): + conf.find_program('luac',var='LUAC') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py new file mode 100644 index 00000000..5a194706 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py @@ -0,0 +1,809 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,re,tempfile +from waflib import Utils,Task,Logs,Options,Errors +from waflib.Logs import debug,warn +from waflib.TaskGen import after_method,feature +from waflib.Configure import conf +from waflib.Tools import ccroot,c,cxx,ar,winres +g_msvc_systemlibs=''' +aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet +cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs +credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d +ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp +faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid +gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop +kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi +mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree +msdasc msimg32 msrating mstask 
msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm +netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp +odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 +osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu +ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm +rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 +shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 +traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg +version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm +wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp +'''.split() +all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('amd64_x86','x86'),('amd64_arm','arm')] +all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] +all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] +def options(opt): + opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') + opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') + opt.add_option('--msvc_lazy_autodetect',action='store_true',help='lazily check msvc target environments') +def setup_msvc(conf,versions,arch=False): + platforms=getattr(Options.options,'msvc_targets','').split(',') + if platforms==['']: + platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] + desired_versions=getattr(Options.options,'msvc_version','').split(',') + if desired_versions==['']: + desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] + 
versiondict=dict(versions) + for version in desired_versions: + try: + targets=dict(versiondict[version]) + for target in platforms: + try: + try: + realtarget,(p1,p2,p3)=targets[target] + except conf.errors.ConfigurationError: + del(targets[target]) + else: + compiler,revision=version.rsplit(' ',1) + if arch: + return compiler,revision,p1,p2,p3,realtarget + else: + return compiler,revision,p1,p2,p3 + except KeyError:continue + except KeyError:continue + conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') +@conf +def get_msvc_version(conf,compiler,version,target,vcvars): + debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) + try: + conf.msvc_cnt+=1 + except AttributeError: + conf.msvc_cnt=1 + batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) + batfile.write("""@echo off +set INCLUDE= +set LIB= +call "%s" %s +echo PATH=%%PATH%% +echo INCLUDE=%%INCLUDE%% +echo LIB=%%LIB%%;%%LIBPATH%% +"""%(vcvars,target)) + sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) + lines=sout.splitlines() + if not lines[0]: + lines.pop(0) + MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None + for line in lines: + if line.startswith('PATH='): + path=line[5:] + MSVC_PATH=path.split(';') + elif line.startswith('INCLUDE='): + MSVC_INCDIR=[i for i in line[8:].split(';')if i] + elif line.startswith('LIB='): + MSVC_LIBDIR=[i for i in line[4:].split(';')if i] + if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): + conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') + env=dict(os.environ) + env.update(PATH=path) + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) + if'CL'in env: + del(env['CL']) + try: + try: + conf.cmd_and_log(cxx+['/help'],env=env) + except UnicodeError: + st=Utils.ex_stack() + if conf.logger: + conf.logger.error(st) + conf.fatal('msvc: Unicode error - check the code page?') + except 
Exception as e: + debug('msvc: get_msvc_version: %r %r %r -> failure %s'%(compiler,version,target,str(e))) + conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)') + else: + debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) + finally: + conf.env[compiler_name]='' + return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) +@conf +def gather_wsdk_versions(conf,versions): + version_pattern=re.compile('^v..?.?\...?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + try: + msvc_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') + except WindowsError: + continue + if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): + targets=[] + for target,arch in all_msvc_platforms: + try: + targets.append((target,(arch,get_compiler_env(conf,'wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) + except conf.errors.ConfigurationError: + pass + versions.append(('wsdk '+version[1:],targets)) +def gather_wince_supported_platforms(): + supported_wince_platforms=[] + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + try: + ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') + except WindowsError: + ce_sdk='' + if not ce_sdk: + return supported_wince_platforms + ce_index=0 + while 1: + try: + sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index) + except 
WindowsError: + break + ce_index=ce_index+1 + sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') + except WindowsError: + try: + path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') + path,xml=os.path.split(path) + except WindowsError: + continue + path=str(path) + path,device=os.path.split(path) + if not device: + path,device=os.path.split(path) + platforms=[] + for arch,compiler in all_wince_platforms: + if os.path.isdir(os.path.join(path,device,'Lib',arch)): + platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) + if platforms: + supported_wince_platforms.append((device,platforms)) + return supported_wince_platforms +def gather_msvc_detected_versions(): + version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') + detected_versions=[] + for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')): + try: + prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + try: + prefix='SOFTWARE\\Microsoft\\'+vcver + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) + except WindowsError: + continue + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + match=version_pattern.match(version) + if not match: + continue + else: + versionnumber=float(match.group(1)) + detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version)) + def fun(tup): + return tup[0] + detected_versions.sort(key=fun) + return detected_versions +def get_compiler_env(conf,compiler,version,bat_target,bat,select=None): + lazy=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] + def msvc_thunk(): + vs=conf.get_msvc_version(compiler,version,bat_target,bat) + if select: + return select(vs) + else: + return vs + return lazytup(msvc_thunk,lazy,([],[],[])) 
+class lazytup(object): + def __init__(self,fn,lazy=True,default=None): + self.fn=fn + self.default=default + if not lazy: + self.evaluate() + def __len__(self): + self.evaluate() + return len(self.value) + def __iter__(self): + self.evaluate() + for i,v in enumerate(self.value): + yield v + def __getitem__(self,i): + self.evaluate() + return self.value[i] + def __repr__(self): + if hasattr(self,'value'): + return repr(self.value) + elif self.default: + return repr(self.default) + else: + self.evaluate() + return repr(self.value) + def evaluate(self): + if hasattr(self,'value'): + return + self.value=self.fn() +@conf +def gather_msvc_targets(conf,versions,version,vc_path): + targets=[] + if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,get_compiler_env(conf,'msvc',version,target,os.path.join(vc_path,'vcvarsall.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): + try: + targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): + try: + targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('msvc '+version,targets)) +@conf +def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): + for device,platforms in supported_platforms: + cetargets=[] + for platform,compiler,include,lib in platforms: + winCEpath=os.path.join(vc_path,'ce') + if not os.path.isdir(winCEpath): + continue + if os.path.isdir(os.path.join(winCEpath,'lib',platform)): + bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)] + 
incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] + libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] + def combine_common(compiler_env): + (common_bindirs,_1,_2)=compiler_env + return(bindirs+common_bindirs,incdirs,libdirs) + try: + cetargets.append((platform,(platform,get_compiler_env(conf,'msvc',version,'x86',vsvars,combine_common)))) + except conf.errors.ConfigurationError: + continue + if cetargets: + versions.append((device+' '+version,cetargets)) +@conf +def gather_winphone_targets(conf,versions,version,vc_path,vsvars): + targets=[] + for target,realtarget in all_msvc_platforms[::-1]: + try: + targets.append((target,(realtarget,get_compiler_env(conf,'winphone',version,target,vsvars)))) + except conf.errors.ConfigurationError: + pass + if targets: + versions.append(('winphone '+version,targets)) +@conf +def gather_msvc_versions(conf,versions): + vc_paths=[] + for(v,version,reg)in gather_msvc_detected_versions(): + try: + try: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") + except WindowsError: + msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") + path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') + vc_paths.append((version,os.path.abspath(str(path)))) + except WindowsError: + continue + wince_supported_platforms=gather_wince_supported_platforms() + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') + if wince_supported_platforms and os.path.isfile(vsvars): + conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) + for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') + if os.path.isfile(vsvars): + conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) + break + 
for version,vc_path in vc_paths: + vs_path=os.path.dirname(vc_path) + conf.gather_msvc_targets(versions,version,vc_path) +@conf +def gather_icl_versions(conf,versions): + version_pattern=re.compile('^...?.?\....?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') + except WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + pass + for target,arch in all_icl_platforms: + try: + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + except WindowsError: + continue + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def gather_intel_composer_versions(conf,versions): + version_pattern=re.compile('^...?.?\...?.?.?') + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') + except 
WindowsError: + try: + all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') + except WindowsError: + return + index=0 + while 1: + try: + version=Utils.winreg.EnumKey(all_versions,index) + except WindowsError: + break + index=index+1 + if not version_pattern.match(version): + continue + targets=[] + for target,arch in all_icl_platforms: + try: + if target=='intel64':targetDir='EM64T_NATIVE' + else:targetDir=target + try: + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) + except WindowsError: + if targetDir=='EM64T_NATIVE': + defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') + else: + raise WindowsError + uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') + Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) + icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') + path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') + batch_file=os.path.join(path,'bin','iclvars.bat') + if os.path.isfile(batch_file): + try: + targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) + except conf.errors.ConfigurationError: + pass + compilervars_warning_attr='_compilervars_warning_key' + if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): + setattr(conf,compilervars_warning_attr,False) + patch_url='http://software.intel.com/en-us/forums/topic/328487' + compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') + for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'): + if vscomntool in os.environ: + vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' + dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' + if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): + Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU 
''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url)) + except WindowsError: + pass + major=version[0:2] + versions.append(('intel '+major,targets)) +@conf +def get_msvc_versions(conf,eval_and_save=True): + if conf.env['MSVC_INSTALLED_VERSIONS']: + return conf.env['MSVC_INSTALLED_VERSIONS'] + lst=[] + conf.gather_icl_versions(lst) + conf.gather_intel_composer_versions(lst) + conf.gather_wsdk_versions(lst) + conf.gather_msvc_versions(lst) + if eval_and_save: + def checked_target(t): + target,(arch,paths)=t + try: + paths.evaluate() + except conf.errors.ConfigurationError: + return None + else: + return t + lst=[(version,list(filter(checked_target,targets)))for version,targets in lst] + conf.env['MSVC_INSTALLED_VERSIONS']=lst + return lst +@conf +def print_all_msvc_detected(conf): + for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: + Logs.info(version) + for target,l in targets: + Logs.info("\t"+target) +@conf +def detect_msvc(conf,arch=False): + lazy_detect=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] + versions=get_msvc_versions(conf,not lazy_detect) + return setup_msvc(conf,versions,arch) +@conf +def find_lt_names_msvc(self,libname,is_static=False): + lt_names=['lib%s.la'%libname,'%s.la'%libname,] + for path in self.env['LIBPATH']: + for la in lt_names: + laf=os.path.join(path,la) + dll=None + if os.path.exists(laf): + ltdict=Utils.read_la_file(laf) + lt_libdir=None + if ltdict.get('libdir',''): + lt_libdir=ltdict['libdir'] + if not is_static and ltdict.get('library_names',''): + dllnames=ltdict['library_names'].split() + dll=dllnames[0].lower() + dll=re.sub('\.dll$','',dll) + return(lt_libdir,dll,False) + elif ltdict.get('old_library',''): + olib=ltdict['old_library'] + if os.path.exists(os.path.join(path,olib)): + return(path,olib,True) + elif 
lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): + return(lt_libdir,olib,True) + else: + return(None,olib,True) + else: + raise self.errors.WafError('invalid libtool object file: %s'%laf) + return(None,None,None) +@conf +def libname_msvc(self,libname,is_static=False): + lib=libname.lower() + lib=re.sub('\.lib$','',lib) + if lib in g_msvc_systemlibs: + return lib + lib=re.sub('^lib','',lib) + if lib=='m': + return None + (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) + if lt_path!=None and lt_libname!=None: + if lt_static==True: + return os.path.join(lt_path,lt_libname) + if lt_path!=None: + _libpaths=[lt_path]+self.env['LIBPATH'] + else: + _libpaths=self.env['LIBPATH'] + static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] + dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] + libnames=static_libs + if not is_static: + libnames=dynamic_libs+static_libs + for path in _libpaths: + for libn in libnames: + if os.path.exists(os.path.join(path,libn)): + debug('msvc: lib found: %s'%os.path.join(path,libn)) + return re.sub('\.lib$','',libn) + self.fatal("The library %r could not be found"%libname) + return re.sub('\.lib$','',libname) +@conf +def check_lib_msvc(self,libname,is_static=False,uselib_store=None): + libn=self.libname_msvc(libname,is_static) + if not uselib_store: + uselib_store=libname.upper() + if False and is_static: + self.env['STLIB_'+uselib_store]=[libn] + else: + self.env['LIB_'+uselib_store]=[libn] +@conf +def check_libs_msvc(self,libnames,is_static=False): + for libname in Utils.to_list(libnames): + self.check_lib_msvc(libname,is_static) +def configure(conf): + conf.autodetect(True) + conf.find_msvc() + conf.msvc_common_flags() + conf.cc_load_tools() + conf.cxx_load_tools() + conf.cc_add_flags() + conf.cxx_add_flags() + conf.link_add_flags() + conf.visual_studio_add_flags() +@conf +def no_autodetect(conf): + 
conf.env.NO_MSVC_DETECT=1 + configure(conf) +@conf +def autodetect(conf,arch=False): + v=conf.env + if v.NO_MSVC_DETECT: + return + if arch: + compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True) + v['DEST_CPU']=arch + else: + compiler,version,path,includes,libdirs=conf.detect_msvc() + v['PATH']=path + v['INCLUDES']=includes + v['LIBPATH']=libdirs + v['MSVC_COMPILER']=compiler + try: + v['MSVC_VERSION']=float(version) + except Exception: + v['MSVC_VERSION']=float(version[:-3]) +def _get_prog_names(conf,compiler): + if compiler=='intel': + compiler_name='ICL' + linker_name='XILINK' + lib_name='XILIB' + else: + compiler_name='CL' + linker_name='LINK' + lib_name='LIB' + return compiler_name,linker_name,lib_name +@conf +def find_msvc(conf): + if sys.platform=='cygwin': + conf.fatal('MSVC module does not work under cygwin Python!') + v=conf.env + path=v['PATH'] + compiler=v['MSVC_COMPILER'] + version=v['MSVC_VERSION'] + compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) + v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) + cxx=conf.find_program(compiler_name,var='CXX',path_list=path) + env=dict(conf.environ) + if path:env.update(PATH=';'.join(path)) + if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): + conf.fatal('the msvc compiler could not be identified') + v['CC']=v['CXX']=cxx + v['CC_NAME']=v['CXX_NAME']='msvc' + if not v['LINK_CXX']: + link=conf.find_program(linker_name,path_list=path) + if link:v['LINK_CXX']=link + else:conf.fatal('%s was not found (linker)'%linker_name) + v['LINK']=link + if not v['LINK_CC']: + v['LINK_CC']=v['LINK_CXX'] + if not v['AR']: + stliblink=conf.find_program(lib_name,path_list=path,var='AR') + if not stliblink:return + v['ARFLAGS']=['/NOLOGO'] + if v.MSVC_MANIFEST: + conf.find_program('MT',path_list=path,var='MT') + v['MTFLAGS']=['/NOLOGO'] + try: + conf.load('winres') + except Errors.WafError: + warn('Resource compiler not found. 
Compiling resource file is disabled') +@conf +def visual_studio_add_flags(self): + v=self.env + try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) + except Exception:pass + try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) + except Exception:pass +@conf +def msvc_common_flags(conf): + v=conf.env + v['DEST_BINFMT']='pe' + v.append_value('CFLAGS',['/nologo']) + v.append_value('CXXFLAGS',['/nologo']) + v['DEFINES_ST']='/D%s' + v['CC_SRC_F']='' + v['CC_TGT_F']=['/c','/Fo'] + v['CXX_SRC_F']='' + v['CXX_TGT_F']=['/c','/Fo'] + if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): + v['CC_TGT_F']=['/FC']+v['CC_TGT_F'] + v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F'] + v['CPPPATH_ST']='/I%s' + v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:' + v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] + v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] + v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] + v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] + v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] + v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT'] + v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] + v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] + v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] + v['LIB_ST']='%s.lib' + v['LIBPATH_ST']='/LIBPATH:%s' + v['STLIB_ST']='%s.lib' + v['STLIBPATH_ST']='/LIBPATH:%s' + v.append_value('LINKFLAGS',['/NOLOGO']) + if v['MSVC_MANIFEST']: + v.append_value('LINKFLAGS',['/MANIFEST']) + v['CFLAGS_cshlib']=[] + v['CXXFLAGS_cxxshlib']=[] + v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL'] + v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll' + v['implib_PATTERN']='%s.lib' + v['IMPLIB_ST']='/IMPLIB:%s' + v['LINKFLAGS_cstlib']=[] + 
v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib' + v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe' +@after_method('apply_link') +@feature('c','cxx') +def apply_flags_msvc(self): + if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): + return + is_static=isinstance(self.link_task,ccroot.stlink_task) + subsystem=getattr(self,'subsystem','') + if subsystem: + subsystem='/subsystem:%s'%subsystem + flags=is_static and'ARFLAGS'or'LINKFLAGS' + self.env.append_value(flags,subsystem) + if not is_static: + for f in self.env.LINKFLAGS: + d=f.lower() + if d[1:]=='debug': + pdbnode=self.link_task.outputs[0].change_ext('.pdb') + self.link_task.outputs.append(pdbnode) + if getattr(self,'install_task',None): + self.pdb_install_task=self.bld.install_files(self.install_task.dest,pdbnode,env=self.env) + break +@feature('cprogram','cshlib','cxxprogram','cxxshlib') +@after_method('apply_link') +def apply_manifest(self): + if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): + out_node=self.link_task.outputs[0] + man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') + self.link_task.outputs.append(man_node) + self.link_task.do_manifest=True +def exec_mf(self): + env=self.env + mtool=env['MT'] + if not mtool: + return 0 + self.do_manifest=False + outfile=self.outputs[0].abspath() + manifest=None + for out_node in self.outputs: + if out_node.name.endswith('.manifest'): + manifest=out_node.abspath() + break + if manifest is None: + return 0 + mode='' + if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features: + mode='1' + elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features: + mode='2' + debug('msvc: embedding manifest in mode %r'%mode) + lst=[]+mtool + lst.extend(Utils.to_list(env['MTFLAGS'])) + lst.extend(['-manifest',manifest]) + lst.append('-outputresource:%s;%s'%(outfile,mode)) + return self.exec_command(lst) +def quote_response_command(self,flag): + if flag.find(' 
')>-1: + for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): + if flag.startswith(x): + flag='%s"%s"'%(x,flag[len(x):]) + break + else: + flag='"%s"'%flag + return flag +def exec_response_command(self,cmd,**kw): + try: + tmp=None + if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: + program=cmd[0] + cmd=[self.quote_response_command(x)for x in cmd] + (fd,tmp)=tempfile.mkstemp() + os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) + os.close(fd) + cmd=[program,'@'+tmp] + ret=self.generator.bld.exec_command(cmd,**kw) + finally: + if tmp: + try: + os.remove(tmp) + except OSError: + pass + return ret +def exec_command_msvc(self,*k,**kw): + if isinstance(k[0],list): + lst=[] + carry='' + for a in k[0]: + if a=='/Fo'or a=='/doc'or a[-1]==':': + carry=a + else: + lst.append(carry+a) + carry='' + k=[lst] + if self.env['PATH']: + env=dict(self.env.env or os.environ) + env.update(PATH=';'.join(self.env['PATH'])) + kw['env']=env + bld=self.generator.bld + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + ret=self.exec_response_command(k[0],**kw) + if not ret and getattr(self,'do_manifest',None): + ret=self.exec_mf() + return ret +def wrap_class(class_name): + cls=Task.classes.get(class_name,None) + if not cls: + return None + derived_class=type(class_name,(cls,),{}) + def exec_command(self,*k,**kw): + if self.env['CC_NAME']=='msvc': + return self.exec_command_msvc(*k,**kw) + else: + return super(derived_class,self).exec_command(*k,**kw) + derived_class.exec_command=exec_command + derived_class.exec_response_command=exec_response_command + derived_class.quote_response_command=quote_response_command + derived_class.exec_command_msvc=exec_command_msvc + derived_class.exec_mf=exec_mf + if hasattr(cls,'hcode'): + derived_class.hcode=cls.hcode + return derived_class +for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split(): + wrap_class(k) +def 
make_winapp(self,family): + append=self.env.append_unique + append('DEFINES','WINAPI_FAMILY=%s'%family) + append('CXXFLAGS','/ZW') + append('CXXFLAGS','/TP') + for lib_path in self.env.LIBPATH: + append('CXXFLAGS','/AI%s'%lib_path) +@feature('winphoneapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_winphone_app(self): + make_winapp(self,'WINAPI_FAMILY_PHONE_APP') + conf.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib') + conf.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib') +@feature('winapp') +@after_method('process_use') +@after_method('propagate_uselib_vars') +def make_windows_app(self): + make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py new file mode 100644 index 00000000..a107298d --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py @@ -0,0 +1,16 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +import waflib.Tools.asm +from waflib.TaskGen import feature +@feature('asm') +def apply_nasm_vars(self): + self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) +def configure(conf): + conf.find_program(['nasm','yasm'],var='AS') + conf.env.AS_TGT_F=['-o'] + conf.env.ASLNK_TGT_F=['-o'] + conf.load('asm') + conf.env.ASMPATH_ST='-I%s'+os.sep diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py new file mode 100644 index 00000000..d065f3f9 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py @@ -0,0 +1,90 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Task,Options,Utils +from waflib.Configure import conf +from waflib.TaskGen import extension,feature,before_method +@before_method('apply_incpaths','apply_link','propagate_uselib_vars') +@feature('perlext') +def init_perlext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN'] +@extension('.xs') +def xsubpp_file(self,node): + outnode=node.change_ext('.c') + self.create_task('xsubpp',node,outnode) + self.source.append(outnode) +class xsubpp(Task.Task): + run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' + color='BLUE' + ext_out=['.h'] +@conf +def check_perl_version(self,minver=None): + res=True + if minver: + cver='.'.join(map(str,minver)) + else: + cver='' + self.start_msg('Checking for minimum perl version %s'%cver) + perl=getattr(Options.options,'perlbinary',None) + if not perl: + perl=self.find_program('perl',var='PERL') + if not perl: + self.end_msg("Perl not found",color="YELLOW") + return False + self.env['PERL']=perl + version=self.cmd_and_log(self.env.PERL+["-e",'printf \"%vd\", $^V']) + if not version: + res=False + version="Unknown" + elif not minver is None: + ver=tuple(map(int,version.split("."))) + if ver +#ifdef __cplusplus +extern "C" { +#endif + void Py_Initialize(void); + void Py_Finalize(void); +#ifdef __cplusplus +} +#endif +int main(int argc, char **argv) +{ + (void)argc; (void)argv; + Py_Initialize(); + Py_Finalize(); + return 0; +} +''' +INST=''' +import sys, py_compile +py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True) +''' +DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] +@before_method('process_source') +@feature('py') +def feature_py(self): + self.install_path=getattr(self,'install_path','${PYTHONDIR}') + 
install_from=getattr(self,'install_from',None) + if install_from and not isinstance(install_from,Node.Node): + install_from=self.path.find_dir(install_from) + self.install_from=install_from + ver=self.env.PYTHON_VERSION + if not ver: + self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version') + if int(ver.replace('.',''))>31: + self.install_32=True +@extension('.py') +def process_py(self,node): + assert(getattr(self,'install_path')),'add features="py"' + if self.install_path: + if self.install_from: + self.bld.install_files(self.install_path,[node],cwd=self.install_from,relative_trick=True) + else: + self.bld.install_files(self.install_path,[node],relative_trick=True) + lst=[] + if self.env.PYC: + lst.append('pyc') + if self.env.PYO: + lst.append('pyo') + if self.install_path: + if self.install_from: + pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env) + else: + pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env) + else: + pyd=node.abspath() + for ext in lst: + if self.env.PYTAG: + name=node.name[:-3] + pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext)) + pyobj.parent.mkdir() + else: + pyobj=node.change_ext(".%s"%ext) + tsk=self.create_task(ext,node,pyobj) + tsk.pyd=pyd + if self.install_path: + self.bld.install_files(os.path.dirname(pyd),pyobj,cwd=node.parent.get_bld(),relative_trick=True) +class pyc(Task.Task): + color='PINK' + def run(self): + cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] + ret=self.generator.bld.exec_command(cmd) + return ret +class pyo(Task.Task): + color='PINK' + def run(self): + cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] + ret=self.generator.bld.exec_command(cmd) + return ret +@feature('pyext') 
+@before_method('propagate_uselib_vars','apply_link') +@after_method('apply_bundle') +def init_pyext(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEXT'in self.uselib: + self.uselib.append('PYEXT') + self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN + self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN + try: + if not self.install_path: + return + except AttributeError: + self.install_path='${PYTHONARCHDIR}' +@feature('pyext') +@before_method('apply_link','apply_bundle') +def set_bundle(self): + if Utils.unversioned_sys_platform()=='darwin': + self.mac_bundle=True +@before_method('propagate_uselib_vars') +@feature('pyembed') +def init_pyembed(self): + self.uselib=self.to_list(getattr(self,'uselib',[])) + if not'PYEMBED'in self.uselib: + self.uselib.append('PYEMBED') +@conf +def get_python_variables(self,variables,imports=None): + if not imports: + try: + imports=self.python_imports + except AttributeError: + imports=DISTUTILS_IMP + program=list(imports) + program.append('') + for v in variables: + program.append("print(repr(%s))"%v) + os_env=dict(os.environ) + try: + del os_env['MACOSX_DEPLOYMENT_TARGET'] + except KeyError: + pass + try: + out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) + except Errors.WafError: + self.fatal('The distutils module is unusable: install "python-devel"?') + self.to_log(out) + return_values=[] + for s in out.splitlines(): + s=s.strip() + if not s: + continue + if s=='None': + return_values.append(None) + elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): + return_values.append(eval(s)) + elif s[0].isdigit(): + return_values.append(int(s)) + else:break + return return_values +@conf +def test_pyembed(self,mode,msg='Testing pyembed configuration'): + self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram 
pyembed'%(mode,mode)) +@conf +def test_pyext(self,mode,msg='Testing pyext configuration'): + self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode)) +@conf +def python_cross_compile(self,features='pyembed pyext'): + features=Utils.to_list(features) + if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ): + return False + for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split(): + if not x in self.environ: + self.fatal('Please set %s in the os environment'%x) + else: + self.env[x]=self.environ[x] + xx=self.env.CXX_NAME and'cxx'or'c' + if'pyext'in features: + flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) + if flags is None: + self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required') + else: + self.parse_flags(flags,'PYEXT') + self.test_pyext(xx) + if'pyembed'in features: + flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) + if flags is None: + self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required') + else: + self.parse_flags(flags,'PYEMBED') + self.test_pyembed(xx) + return True +@conf +def check_python_headers(conf,features='pyembed pyext'): + features=Utils.to_list(features) + assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'" + env=conf.env + if not env['CC_NAME']and not env['CXX_NAME']: + conf.fatal('load a compiler first (gcc, g++, ..)') + if conf.python_cross_compile(features): + return + if not env['PYTHON_VERSION']: + conf.check_python_version() + pybin=env.PYTHON + if not pybin: + conf.fatal('Could not find the python executable') + v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() + try: + 
lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) + except RuntimeError: + conf.fatal("Python development headers not found (-v for details).") + vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] + conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals))) + dct=dict(zip(v,lst)) + x='MACOSX_DEPLOYMENT_TARGET' + if dct[x]: + env[x]=conf.environ[x]=dct[x] + env['pyext_PATTERN']='%s'+dct['SO'] + num='.'.join(env['PYTHON_VERSION'].split('.')[:2]) + conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False) + if env.PYTHON_CONFIG: + all_flags=[['--cflags','--libs','--ldflags']] + if sys.hexversion<0x2070000: + all_flags=[[k]for k in all_flags[0]] + xx=env.CXX_NAME and'cxx'or'c' + if'pyembed'in features: + for flags in all_flags: + conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags) + try: + conf.test_pyembed(xx) + except conf.errors.ConfigurationError: + if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']]) + conf.test_pyembed(xx) + else: + raise + if'pyext'in features: + for flags in all_flags: + conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags) + try: + conf.test_pyext(xx) + except conf.errors.ConfigurationError: + if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: + env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']]) + conf.test_pyext(xx) + else: + raise + conf.define('HAVE_PYTHON_H',1) + return + all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEMBED') + all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] + conf.parse_flags(all_flags,'PYEXT') + result=None + if not dct["LDVERSION"]: + dct["LDVERSION"]=env['PYTHON_VERSION'] + for name 
in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')): + if not result and env['LIBPATH_PYEMBED']: + path=env['LIBPATH_PYEMBED'] + conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) + if not result and dct['LIBDIR']: + path=[dct['LIBDIR']] + conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) + if not result and dct['LIBPL']: + path=[dct['LIBPL']] + conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) + if not result: + path=[os.path.join(dct['prefix'],"libs")] + conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") + result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) + if result: + break + if result: + env['LIBPATH_PYEMBED']=path + env.append_value('LIB_PYEMBED',[name]) + else: + conf.to_log("\n\n### LIB NOT FOUND\n") + if Utils.is_win32 or dct['Py_ENABLE_SHARED']: + env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] + env['LIB_PYEXT']=env['LIB_PYEMBED'] + conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],)) + env['INCLUDES_PYEXT']=[dct['INCLUDEPY']] + env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']] + if env['CC_NAME']=='gcc': + env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) + env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) + if env['CXX_NAME']=='gcc': + env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) + 
env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) + if env.CC_NAME=="msvc": + from distutils.msvccompiler import MSVCCompiler + dist_compiler=MSVCCompiler() + dist_compiler.initialize() + env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) + env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) + conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!') +@conf +def check_python_version(conf,minver=None): + assert minver is None or isinstance(minver,tuple) + pybin=conf.env['PYTHON'] + if not pybin: + conf.fatal('could not find the python executable') + cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] + Logs.debug('python: Running python command %r'%cmd) + lines=conf.cmd_and_log(cmd).split() + assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) + pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) + result=(minver is None)or(pyver_tuple>=minver) + if result: + pyver='.'.join([str(x)for x in pyver_tuple[:2]]) + conf.env['PYTHON_VERSION']=pyver + if'PYTHONDIR'in conf.env: + pydir=conf.env['PYTHONDIR'] + elif'PYTHONDIR'in conf.environ: + pydir=conf.environ['PYTHONDIR'] + else: + if Utils.is_win32: + (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"]) + else: + python_LIBDEST=None + (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + if python_LIBDEST is None: + if conf.env['LIBDIR']: + python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) + else: + python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) + if'PYTHONARCHDIR'in conf.env: + pyarchdir=conf.env['PYTHONARCHDIR'] + elif'PYTHONARCHDIR'in conf.environ: + 
pyarchdir=conf.environ['PYTHONARCHDIR'] + else: + (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) + if not pyarchdir: + pyarchdir=pydir + if hasattr(conf,'define'): + conf.define('PYTHONDIR',pydir) + conf.define('PYTHONARCHDIR',pyarchdir) + conf.env['PYTHONDIR']=pydir + conf.env['PYTHONARCHDIR']=pyarchdir + pyver_full='.'.join(map(str,pyver_tuple[:3])) + if minver is None: + conf.msg('Checking for python version',pyver_full) + else: + minver_str='.'.join(map(str,minver)) + conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW') + if not result: + conf.fatal('The python version is too old, expecting %r'%(minver,)) +PYTHON_MODULE_TEMPLATE=''' +import %s as current_module +version = getattr(current_module, '__version__', None) +if version is not None: + print(str(version)) +else: + print('unknown version') +''' +@conf +def check_python_module(conf,module_name,condition=''): + msg="Checking for python module '%s'"%module_name + if condition: + msg='%s (%s)'%(msg,condition) + conf.start_msg(msg) + try: + ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name]) + except Exception: + conf.end_msg(False) + conf.fatal('Could not find the python module %r'%module_name) + ret=ret.strip() + if condition: + conf.end_msg(ret) + if ret=='unknown version': + conf.fatal('Could not check the %s version'%module_name) + from distutils.version import LooseVersion + def num(*k): + if isinstance(k[0],int): + return LooseVersion('.'.join([str(x)for x in k])) + else: + return LooseVersion(k[0]) + d={'num':num,'ver':LooseVersion(ret)} + ev=eval(condition,{},d) + if not ev: + conf.fatal('The %s version does not satisfy the requirements'%module_name) + else: + if ret=='unknown version': + conf.end_msg(True) + else: + conf.end_msg(ret) +def configure(conf): + v=conf.env + v['PYTHON']=Options.options.python or os.environ.get('PYTHON',sys.executable) + if 
Options.options.pythondir: + v['PYTHONDIR']=Options.options.pythondir + if Options.options.pythonarchdir: + v['PYTHONARCHDIR']=Options.options.pythonarchdir + conf.find_program('python',var='PYTHON') + v['PYFLAGS']='' + v['PYFLAGS_OPT']='-O' + v['PYC']=getattr(Options.options,'pyc',1) + v['PYO']=getattr(Options.options,'pyo',1) + try: + v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip() + except Errors.WafError: + pass +def options(opt): + pyopt=opt.add_option_group("Python Options") + pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]') + pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]') + pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable) + pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)') + pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py new file mode 100644 index 00000000..896c5b43 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py @@ -0,0 +1,442 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import cxx +from waflib import Task,Utils,Options,Errors,Context +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT4=['.cpp','.cc','.cxx','.C'] +QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + if self.generator: + self.generator.tasks.append(tsk) + gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + def moc_h_ext(self): + ext=[] + try: + ext=Options.options.qt_header_ext.split() + except AttributeError: + pass + if not ext: + ext=MOC_H + return ext + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + include_nodes=[node.parent]+self.generator.includes_nodes + moctasks=[] + mocfiles=set([]) + for d in bld.raw_deps.get(self.uid(),[]): 
+ if not d.endswith('.moc'): + continue + if d in mocfiles: + continue + mocfiles.add(d) + h_node=None + base2=d[:-4] + for x in include_nodes: + for e in self.moc_h_ext(): + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT4: + if base2.endswith(k): + for x in include_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('No source found for %r which is a moc file'%d) + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui4',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt4') +@after_method('apply_link') +def apply_qt4(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if 
getattr(self,'update',None)and Options.options.trans_qt4: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in('-D','-I','/D','/I'): + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT4) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + def scan(self): + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' + def keyword(self): + return"Creating" + def __str__(self): + return self.outputs[0].path_from(self.generator.bld.launch_node()) +class ui4(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] 
+class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['%s'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='\n\n%s\n\n'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt4_binaries() + self.set_qt4_libs_to_check() + self.set_qt4_defines() + self.find_qt4_libraries() + self.add_qt4_rpath() + self.simplify_qt4_libs() +@conf +def find_qt4_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT4_ROOT','') + qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt4/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['4','0','0'] + for qmk in('qmake-qt4','qmake4','qmake'): + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() + except self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt4') + qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt3','uic3'],'QT_UIC3') + 
find_bin(['uic-qt4','uic'],'QT_UIC') + if not env.QT_UIC: + self.fatal('cannot find the uic compiler for qt4') + self.start_msg('Checking for uic version') + uicver=self.cmd_and_log(env.QT_UIC+["-version"],output=Context.BOTH) + uicver=''.join(uicver).strip() + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.end_msg(uicver) + if uicver.find(' 3.')!=-1: + self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') + find_bin(['moc-qt4','moc'],'QT_MOC') + find_bin(['rcc-qt4','rcc'],'QT_RCC') + find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') + env['UIC3_ST']='%s -o %s' + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt4_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None) + if not qtlibs: + try: + qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt4 libraries in',qtlibs) + qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT4_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt4_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": + frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if 
os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for i in self.qt4_vars_debug+self.qt4_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) +@conf +def simplify_qt4_libs(self): + env=self.env + def 
process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt4_vars,'LIBPATH_QTCORE') + process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt4_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt4_vars,'LIBPATH_QTCORE') + process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt4_libs_to_check(self): + if not hasattr(self,'qt4_vars'): + self.qt4_vars=QT4_LIBS + self.qt4_vars=Utils.to_list(self.qt4_vars) + if not hasattr(self,'qt4_vars_debug'): + self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars] + self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug) +@conf +def set_qt4_defines(self): + if sys.platform!='win32': + return + for x in self.qt4_vars: + y=x[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py new file mode 100644 index 00000000..f69c79dc --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py @@ -0,0 +1,489 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +try: + from xml.sax import make_parser + from xml.sax.handler import ContentHandler +except ImportError: + has_xml=False + ContentHandler=object +else: + has_xml=True +import os,sys +from waflib.Tools import cxx +from waflib import Task,Utils,Options,Errors,Context +from waflib.TaskGen import feature,after_method,extension +from waflib.Configure import conf +from waflib import Logs +MOC_H=['.h','.hpp','.hxx','.hh'] +EXT_RCC=['.qrc'] +EXT_UI=['.ui'] +EXT_QT5=['.cpp','.cc','.cxx','.C'] +QT5_LIBS=''' +qtmain +Qt5Bluetooth +Qt5CLucene +Qt5Concurrent +Qt5Core +Qt5DBus +Qt5Declarative +Qt5DesignerComponents +Qt5Designer +Qt5Gui +Qt5Help +Qt5MultimediaQuick_p +Qt5Multimedia +Qt5MultimediaWidgets +Qt5Network +Qt5Nfc +Qt5OpenGL +Qt5Positioning +Qt5PrintSupport +Qt5Qml +Qt5QuickParticles +Qt5Quick +Qt5QuickTest +Qt5Script +Qt5ScriptTools +Qt5Sensors +Qt5SerialPort +Qt5Sql +Qt5Svg +Qt5Test +Qt5WebKit +Qt5WebKitWidgets +Qt5Widgets +Qt5WinExtras +Qt5X11Extras +Qt5XmlPatterns +Qt5Xml''' +class qxx(Task.classes['cxx']): + def __init__(self,*k,**kw): + Task.Task.__init__(self,*k,**kw) + self.moc_done=0 + def runnable_status(self): + if self.moc_done: + return Task.Task.runnable_status(self) + else: + for t in self.run_after: + if not t.hasrun: + return Task.ASK_LATER + self.add_moc_tasks() + return Task.Task.runnable_status(self) + def create_moc_task(self,h_node,m_node): + try: + moc_cache=self.generator.bld.moc_cache + except AttributeError: + moc_cache=self.generator.bld.moc_cache={} + try: + return moc_cache[h_node] + except KeyError: + tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) + 
tsk.set_inputs(h_node) + tsk.set_outputs(m_node) + if self.generator: + self.generator.tasks.append(tsk) + gen=self.generator.bld.producer + gen.outstanding.insert(0,tsk) + gen.total+=1 + return tsk + else: + delattr(self,'cache_sig') + def moc_h_ext(self): + ext=[] + try: + ext=Options.options.qt_header_ext.split() + except AttributeError: + pass + if not ext: + ext=MOC_H + return ext + def add_moc_tasks(self): + node=self.inputs[0] + bld=self.generator.bld + try: + self.signature() + except KeyError: + pass + else: + delattr(self,'cache_sig') + include_nodes=[node.parent]+self.generator.includes_nodes + moctasks=[] + mocfiles=set([]) + for d in bld.raw_deps.get(self.uid(),[]): + if not d.endswith('.moc'): + continue + if d in mocfiles: + continue + mocfiles.add(d) + h_node=None + base2=d[:-4] + for x in include_nodes: + for e in self.moc_h_ext(): + h_node=x.find_node(base2+e) + if h_node: + break + if h_node: + m_node=h_node.change_ext('.moc') + break + else: + for k in EXT_QT5: + if base2.endswith(k): + for x in include_nodes: + h_node=x.find_node(base2) + if h_node: + break + if h_node: + m_node=h_node.change_ext(k+'.moc') + break + if not h_node: + raise Errors.WafError('No source found for %r which is a moc file'%d) + task=self.create_moc_task(h_node,m_node) + moctasks.append(task) + self.run_after.update(set(moctasks)) + self.moc_done=1 +class trans_update(Task.Task): + run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' + color='BLUE' +Task.update_outputs(trans_update) +class XMLHandler(ContentHandler): + def __init__(self): + self.buf=[] + self.files=[] + def startElement(self,name,attrs): + if name=='file': + self.buf=[] + def endElement(self,name): + if name=='file': + self.files.append(str(''.join(self.buf))) + def characters(self,cars): + self.buf.append(cars) +@extension(*EXT_RCC) +def create_rcc_task(self,node): + rcnode=node.change_ext('_rc.cpp') + self.create_task('rcc',node,rcnode) + cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) + try: + 
self.compiled_tasks.append(cpptask) + except AttributeError: + self.compiled_tasks=[cpptask] + return cpptask +@extension(*EXT_UI) +def create_uic_task(self,node): + uictask=self.create_task('ui5',node) + uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] +@extension('.ts') +def add_lang(self,node): + self.lang=self.to_list(getattr(self,'lang',[]))+[node] +@feature('qt5') +@after_method('apply_link') +def apply_qt5(self): + if getattr(self,'lang',None): + qmtasks=[] + for x in self.to_list(self.lang): + if isinstance(x,str): + x=self.path.find_resource(x+'.ts') + qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) + if getattr(self,'update',None)and Options.options.trans_qt5: + cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] + for x in qmtasks: + self.create_task('trans_update',cxxnodes,x.inputs) + if getattr(self,'langname',None): + qmnodes=[x.outputs[0]for x in qmtasks] + rcnode=self.langname + if isinstance(rcnode,str): + rcnode=self.path.find_or_declare(rcnode+'.qrc') + t=self.create_task('qm2rcc',qmnodes,rcnode) + k=create_rcc_task(self,t.outputs[0]) + self.link_task.inputs.append(k.outputs[0]) + lst=[] + for flag in self.to_list(self.env['CXXFLAGS']): + if len(flag)<2:continue + f=flag[0:2] + if f in('-D','-I','/D','/I'): + if(f[0]=='/'): + lst.append('-'+flag[1:]) + else: + lst.append(flag) + self.env.append_value('MOC_FLAGS',lst) +@extension(*EXT_QT5) +def cxx_hook(self,node): + return self.create_compiled_task('qxx',node) +class rcc(Task.Task): + color='BLUE' + run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' + ext_out=['.h'] + def rcname(self): + return os.path.splitext(self.inputs[0].name)[0] + def scan(self): + if not has_xml: + Logs.error('no xml support was found, the rcc dependencies will be incomplete!') + return([],[]) + parser=make_parser() + curHandler=XMLHandler() + 
parser.setContentHandler(curHandler) + fi=open(self.inputs[0].abspath(),'r') + try: + parser.parse(fi) + finally: + fi.close() + nodes=[] + names=[] + root=self.inputs[0].parent + for x in curHandler.files: + nd=root.find_resource(x) + if nd:nodes.append(nd) + else:names.append(x) + return(nodes,names) +class moc(Task.Task): + color='BLUE' + run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' +class ui5(Task.Task): + color='BLUE' + run_str='${QT_UIC} ${SRC} -o ${TGT}' + ext_out=['.h'] +class ts2qm(Task.Task): + color='BLUE' + run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' +class qm2rcc(Task.Task): + color='BLUE' + after='ts2qm' + def run(self): + txt='\n'.join(['%s'%k.path_from(self.outputs[0].parent)for k in self.inputs]) + code='\n\n%s\n\n'%txt + self.outputs[0].write(code) +def configure(self): + self.find_qt5_binaries() + self.set_qt5_libs_to_check() + self.set_qt5_defines() + self.find_qt5_libraries() + self.add_qt5_rpath() + self.simplify_qt5_libs() +@conf +def find_qt5_binaries(self): + env=self.env + opt=Options.options + qtdir=getattr(opt,'qtdir','') + qtbin=getattr(opt,'qtbin','') + paths=[] + if qtdir: + qtbin=os.path.join(qtdir,'bin') + if not qtdir: + qtdir=os.environ.get('QT5_ROOT','') + qtbin=os.environ.get('QT5_BIN',None)or os.path.join(qtdir,'bin') + if qtbin: + paths=[qtbin] + if not qtdir: + paths=os.environ.get('PATH','').split(os.pathsep) + paths.append('/usr/share/qt5/bin/') + try: + lst=Utils.listdir('/usr/local/Trolltech/') + except OSError: + pass + else: + if lst: + lst.sort() + lst.reverse() + qtdir='/usr/local/Trolltech/%s/'%lst[0] + qtbin=os.path.join(qtdir,'bin') + paths.append(qtbin) + cand=None + prev_ver=['5','0','0'] + for qmk in('qmake-qt5','qmake5','qmake'): + try: + qmake=self.find_program(qmk,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + try: + version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() + except 
self.errors.WafError: + pass + else: + if version: + new_ver=version.split('.') + if new_ver>prev_ver: + cand=qmake + prev_ver=new_ver + if not cand: + try: + self.find_program('qtchooser') + except self.errors.ConfigurationError: + pass + else: + cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake'] + try: + version=self.cmd_and_log(cmd+['-query','QT_VERSION']) + except self.errors.WafError: + pass + else: + cand=cmd + if cand: + self.env.QMAKE=cand + else: + self.fatal('Could not find qmake for qt5') + self.env.QT_INSTALL_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep + paths.insert(0,qtbin) + def find_bin(lst,var): + if var in env: + return + for f in lst: + try: + ret=self.find_program(f,path_list=paths) + except self.errors.ConfigurationError: + pass + else: + env[var]=ret + break + find_bin(['uic-qt5','uic'],'QT_UIC') + if not env.QT_UIC: + self.fatal('cannot find the uic compiler for qt5') + self.start_msg('Checking for uic version') + uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH) + uicver=''.join(uicver).strip() + uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') + self.end_msg(uicver) + if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1: + self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path') + find_bin(['moc-qt5','moc'],'QT_MOC') + find_bin(['rcc-qt5','rcc'],'QT_RCC') + find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE') + find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE') + env['UIC_ST']='%s -o %s' + env['MOC_ST']='-o' + env['ui_PATTERN']='ui_%s.h' + env['QT_LRELEASE_FLAGS']=['-silent'] + env.MOCCPPPATH_ST='-I%s' + env.MOCDEFINES_ST='-D%s' +@conf +def find_qt5_libraries(self): + qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT5_LIBDIR",None) + if not qtlibs: + try: + qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() + except Errors.WafError: + 
qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep + qtlibs=os.path.join(qtdir,'lib') + self.msg('Found the Qt5 libraries in',qtlibs) + qtincludes=os.environ.get("QT5_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() + env=self.env + if not'PKG_CONFIG_PATH'in os.environ: + os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(qtlibs,qtlibs) + try: + if os.environ.get("QT5_XCOMPILE",None): + raise self.errors.ConfigurationError() + self.check_cfg(atleast_pkgconfig_version='0.1') + except self.errors.ConfigurationError: + for i in self.qt5_vars: + uselib=i.upper() + if Utils.unversioned_sys_platform()=="darwin": + frameworkName=i+".framework" + qtDynamicLib=os.path.join(qtlibs,frameworkName,i) + if os.path.exists(qtDynamicLib): + env.append_unique('FRAMEWORK_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) + elif env.DEST_OS!="win32": + qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") + qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") + if os.path.exists(qtDynamicLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') + elif os.path.exists(qtStaticLib): + env.append_unique('LIB_'+uselib,i) + self.msg('Checking for %s'%i,qtStaticLib,'GREEN') + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) + else: + for k in("lib%s.a","lib%s5.a","%s.lib","%s5.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for 
%s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) + uselib=i.upper()+"_debug" + for k in("lib%sd.a","lib%sd5.a","%sd.lib","%sd5.lib"): + lib=os.path.join(qtlibs,k%i) + if os.path.exists(lib): + env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) + self.msg('Checking for %s'%i,lib,'GREEN') + break + else: + self.msg('Checking for %s'%i,False,'YELLOW') + env.append_unique('LIBPATH_'+uselib,qtlibs) + env.append_unique('INCLUDES_'+uselib,qtincludes) + env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) + else: + for i in self.qt5_vars_debug+self.qt5_vars: + self.check_cfg(package=i,args='--cflags --libs',mandatory=False) +@conf +def simplify_qt5_libs(self): + env=self.env + def process_lib(vars_,coreval): + for d in vars_: + var=d.upper() + if var=='QTCORE': + continue + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if lib in core: + continue + accu.append(lib) + env['LIBPATH_'+var]=accu + process_lib(self.qt5_vars,'LIBPATH_QTCORE') + process_lib(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def add_qt5_rpath(self): + env=self.env + if getattr(Options.options,'want_rpath',False): + def process_rpath(vars_,coreval): + for d in vars_: + var=d.upper() + value=env['LIBPATH_'+var] + if value: + core=env[coreval] + accu=[] + for lib in value: + if var!='QTCORE': + if lib in core: + continue + accu.append('-Wl,--rpath='+lib) + env['RPATH_'+var]=accu + process_rpath(self.qt5_vars,'LIBPATH_QTCORE') + process_rpath(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') +@conf +def set_qt5_libs_to_check(self): + if not hasattr(self,'qt5_vars'): + self.qt5_vars=QT5_LIBS + self.qt5_vars=Utils.to_list(self.qt5_vars) + if not hasattr(self,'qt5_vars_debug'): + self.qt5_vars_debug=[a+'_debug'for a in self.qt5_vars] + 
self.qt5_vars_debug=Utils.to_list(self.qt5_vars_debug) +@conf +def set_qt5_defines(self): + if sys.platform!='win32': + return + for x in self.qt5_vars: + y=x.replace('Qt5','Qt')[2:].upper() + self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) + self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) +def options(opt): + opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') + opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') + for i in'qtdir qtbin qtlibs'.split(): + opt.add_option('--'+i,type='string',default='',dest=i) + opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt5",default=False) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py new file mode 100644 index 00000000..39c170ae --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py @@ -0,0 +1,103 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib import Options,Utils,Task +from waflib.TaskGen import before_method,feature,extension +from waflib.Configure import conf +@feature('rubyext') +@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link') +def init_rubyext(self): + self.install_path='${ARCHDIR_RUBY}' + self.uselib=self.to_list(getattr(self,'uselib','')) + if not'RUBY'in self.uselib: + self.uselib.append('RUBY') + if not'RUBYEXT'in self.uselib: + self.uselib.append('RUBYEXT') +@feature('rubyext') +@before_method('apply_link','propagate_uselib') +def apply_ruby_so_name(self): + self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN'] +@conf +def check_ruby_version(self,minver=()): + if Options.options.rubybinary: + self.env.RUBY=Options.options.rubybinary + else: + self.find_program('ruby',var='RUBY') + ruby=self.env.RUBY + try: + version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip() + except Exception: + self.fatal('could not determine ruby version') + self.env.RUBY_VERSION=version + try: + ver=tuple(map(int,version.split("."))) + except Exception: + self.fatal('unsupported ruby version %r'%version) + cver='' + if minver: + if ver=(1,9,0): + ruby_hdrdir=read_config('rubyhdrdir') + cpppath+=ruby_hdrdir + if version>=(2,0,0): + cpppath+=read_config('rubyarchhdrdir') + cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] + self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False) + self.env.LIBPATH_RUBYEXT=read_config('libdir') + self.env.LIBPATH_RUBYEXT+=archdir + self.env.INCLUDES_RUBYEXT=cpppath + self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') + self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] + flags=read_config('LDSHARED') + while flags and flags[0][0]!='-': + flags=flags[1:] + if len(flags)>1 and flags[1]=="ppc": + flags=flags[2:] + 
self.env.LINKFLAGS_RUBYEXT=flags + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') + self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') + if Options.options.rubyarchdir: + self.env.ARCHDIR_RUBY=Options.options.rubyarchdir + else: + self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] + if Options.options.rubylibdir: + self.env.LIBDIR_RUBY=Options.options.rubylibdir + else: + self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] +@conf +def check_ruby_module(self,module_name): + self.start_msg('Ruby module %s'%module_name) + try: + self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name]) + except Exception: + self.end_msg(False) + self.fatal('Could not find the ruby module %r'%module_name) + self.end_msg(True) +@extension('.rb') +def process(self,node): + return self.create_task('run_ruby',node) +class run_ruby(Task.Task): + run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' +def options(opt): + opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') + opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') + opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py new file mode 100644 index 00000000..f014abfc --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py @@ -0,0 +1,46 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_scc(conf): + v=conf.env + cc=conf.find_program('cc',var='CC') + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v.CC_NAME='sun' + conf.get_suncc_version(cc) +@conf +def scc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']='' + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-xcode=pic32','-DPIC'] + v['LINKFLAGS_cshlib']=['-G'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=['-Bstatic'] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_scc() + conf.find_ar() + conf.scc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py new file mode 100644 index 00000000..7130fdfd --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py @@ -0,0 +1,46 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_sxx(conf): + v=conf.env + cc=conf.find_program(['CC','c++'],var='CXX') + try: + conf.cmd_and_log(cc+['-flags']) + except Exception: + conf.fatal('%r is not a Sun compiler'%cc) + v.CXX_NAME='sun' + conf.get_suncc_version(cc) +@conf +def sxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['SONAME_ST']='-Wl,-h,%s' + v['SHLIB_MARKER']='-Bdynamic' + v['STLIB_MARKER']='-Bstatic' + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-xcode=pic32','-DPIC'] + v['LINKFLAGS_cxxshlib']=['-G'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=['-Bstatic'] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_sxx() + conf.find_ar() + conf.sxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py new file mode 100644 index 00000000..a91fd911 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py @@ -0,0 +1,317 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re +from waflib import Utils,Task,Errors,Logs,Node +from waflib.TaskGen import feature,before_method +re_bibunit=re.compile(r'\\(?Pputbib)\[(?P[^\[\]]*)\]',re.M) +def bibunitscan(self): + node=self.inputs[0] + nodes=[] + if not node:return nodes + code=node.read() + for match in re_bibunit.finditer(code): + path=match.group('file') + if path: + for k in('','.bib'): + Logs.debug('tex: trying %s%s'%(path,k)) + fi=node.parent.find_resource(path+k) + if fi: + nodes.append(fi) + else: + Logs.debug('tex: could not find %s'%path) + Logs.debug("tex: found the following bibunit files: %s"%nodes) + return nodes +exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty'] +exts_tex=['.ltx','.tex'] +re_tex=re.compile(r'\\(?Pusepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P[^{}]*)}',re.M) +g_bibtex_re=re.compile('bibdata',re.M) +g_glossaries_re=re.compile('\\@newglossary',re.M) +class tex(Task.Task): + bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) + bibtex_fun.__doc__=""" + Execute the program **bibtex** + """ + makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) + makeindex_fun.__doc__=""" + Execute the program **makeindex** + """ + makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False) + makeglossaries_fun.__doc__=""" + Execute the program **makeglossaries** + """ + def exec_command(self,cmd,**kw): + bld=self.generator.bld + Logs.info('runner: %r'%cmd) + try: + if not kw.get('cwd',None): + kw['cwd']=bld.cwd + except AttributeError: + bld.cwd=kw['cwd']=bld.variant_dir + return Utils.subprocess.Popen(cmd,**kw).wait() + def scan_aux(self,node): + nodes=[node] + re_aux=re.compile(r'\\@input{(?P[^{}]*)}',re.M) + def parse_node(node): + code=node.read() + for match in re_aux.finditer(code): + 
path=match.group('file') + found=node.parent.find_or_declare(path) + if found and found not in nodes: + Logs.debug('tex: found aux node '+found.abspath()) + nodes.append(found) + parse_node(found) + parse_node(node) + return nodes + def scan(self): + node=self.inputs[0] + nodes=[] + names=[] + seen=[] + if not node:return(nodes,names) + def parse_node(node): + if node in seen: + return + seen.append(node) + code=node.read() + global re_tex + for match in re_tex.finditer(code): + multibib=match.group('type') + if multibib and multibib.startswith('bibliography'): + multibib=multibib[len('bibliography'):] + if multibib.startswith('style'): + continue + else: + multibib=None + for path in match.group('file').split(','): + if path: + add_name=True + found=None + for k in exts_deps_tex: + for up in self.texinputs_nodes: + Logs.debug('tex: trying %s%s'%(path,k)) + found=up.find_resource(path+k) + if found: + break + for tsk in self.generator.tasks: + if not found or found in tsk.outputs: + break + else: + nodes.append(found) + add_name=False + for ext in exts_tex: + if found.name.endswith(ext): + parse_node(found) + break + if found and multibib and found.name.endswith('.bib'): + try: + self.multibibs.append(found) + except AttributeError: + self.multibibs=[found] + if add_name: + names.append(path) + parse_node(node) + for x in nodes: + x.parent.get_bld().mkdir() + Logs.debug("tex: found the following : %s and names %s"%(nodes,names)) + return(nodes,names) + def check_status(self,msg,retcode): + if retcode!=0: + raise Errors.WafError("%r command exit status %r"%(msg,retcode)) + def bibfile(self): + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except EnvironmentError: + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_bibtex_re.findall(ct): + Logs.info('calling bibtex') + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) + 
self.env.SRCFILE=aux_node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + for node in getattr(self,'multibibs',[]): + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) + self.env.SRCFILE=node.name[:-4] + self.check_status('error when calling bibtex',self.bibtex_fun()) + def bibunits(self): + try: + bibunits=bibunitscan(self) + except OSError: + Logs.error('error bibunitscan') + else: + if bibunits: + fn=['bu'+str(i)for i in range(1,len(bibunits)+1)] + if fn: + Logs.info('calling bibtex on bibunits') + for f in fn: + self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()} + self.env.SRCFILE=f + self.check_status('error when calling bibtex',self.bibtex_fun()) + def makeindex(self): + self.idx_node=self.inputs[0].change_ext('.idx') + try: + idx_path=self.idx_node.abspath() + os.stat(idx_path) + except OSError: + Logs.info('index file %s absent, not calling makeindex'%idx_path) + else: + Logs.info('calling makeindex') + self.env.SRCFILE=self.idx_node.name + self.env.env={} + self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) + def bibtopic(self): + p=self.inputs[0].parent.get_bld() + if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): + self.aux_nodes+=p.ant_glob('*[0-9].aux') + def makeglossaries(self): + src_file=self.inputs[0].abspath() + base_file=os.path.basename(src_file) + base,_=os.path.splitext(base_file) + for aux_node in self.aux_nodes: + try: + ct=aux_node.read() + except EnvironmentError: + Logs.error('Error reading %s: %r'%aux_node.abspath()) + continue + if g_glossaries_re.findall(ct): + if not self.env.MAKEGLOSSARIES: + raise Errors.WafError("The program 'makeglossaries' is missing!") + Logs.warn('calling makeglossaries') + self.env.SRCFILE=base + self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun()) + return + def texinputs(self): + return 
os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep + def run(self): + env=self.env + if not env['PROMPT_LATEX']: + env.append_value('LATEXFLAGS','-interaction=batchmode') + env.append_value('PDFLATEXFLAGS','-interaction=batchmode') + env.append_value('XELATEXFLAGS','-interaction=batchmode') + self.cwd=self.inputs[0].parent.get_bld().abspath() + Logs.info('first pass on %s'%self.__class__.__name__) + cur_hash=self.hash_aux_nodes() + self.call_latex() + self.hash_aux_nodes() + self.bibtopic() + self.bibfile() + self.bibunits() + self.makeindex() + self.makeglossaries() + for i in range(10): + prev_hash=cur_hash + cur_hash=self.hash_aux_nodes() + if not cur_hash: + Logs.error('No aux.h to process') + if cur_hash and cur_hash==prev_hash: + break + Logs.info('calling %s'%self.__class__.__name__) + self.call_latex() + def hash_aux_nodes(self): + try: + self.aux_nodes + except AttributeError: + try: + self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux')) + except IOError: + return None + return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes]) + def call_latex(self): + self.env.env={} + self.env.env.update(os.environ) + self.env.env.update({'TEXINPUTS':self.texinputs()}) + self.env.SRCFILE=self.inputs[0].abspath() + self.check_status('error when calling latex',self.texfun()) +class latex(tex): + texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) +class pdflatex(tex): + texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) +class xelatex(tex): + texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) +class dvips(Task.Task): + run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class dvipdf(Task.Task): + run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' + color='BLUE' + after=['latex','pdflatex','xelatex'] +class pdf2ps(Task.Task): + run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' + color='BLUE' 
+ after=['latex','pdflatex','xelatex'] +@feature('tex') +@before_method('process_source') +def apply_tex(self): + if not getattr(self,'type',None)in('latex','pdflatex','xelatex'): + self.type='pdflatex' + outs=Utils.to_list(getattr(self,'outs',[])) + self.env['PROMPT_LATEX']=getattr(self,'prompt',1) + deps_lst=[] + if getattr(self,'deps',None): + deps=self.to_list(self.deps) + for dep in deps: + if isinstance(dep,str): + n=self.path.find_resource(dep) + if not n: + self.bld.fatal('Could not find %r for %r'%(dep,self)) + if not n in deps_lst: + deps_lst.append(n) + elif isinstance(dep,Node.Node): + deps_lst.append(dep) + for node in self.to_nodes(self.source): + if self.type=='latex': + task=self.create_task('latex',node,node.change_ext('.dvi')) + elif self.type=='pdflatex': + task=self.create_task('pdflatex',node,node.change_ext('.pdf')) + elif self.type=='xelatex': + task=self.create_task('xelatex',node,node.change_ext('.pdf')) + task.env=self.env + if deps_lst: + for n in deps_lst: + if not n in task.dep_nodes: + task.dep_nodes.append(n) + if hasattr(self,'texinputs_nodes'): + task.texinputs_nodes=self.texinputs_nodes + else: + task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()] + lst=os.environ.get('TEXINPUTS','') + if self.env.TEXINPUTS: + lst+=os.pathsep+self.env.TEXINPUTS + if lst: + lst=lst.split(os.pathsep) + for x in lst: + if x: + if os.path.isabs(x): + p=self.bld.root.find_node(x) + if p: + task.texinputs_nodes.append(p) + else: + Logs.error('Invalid TEXINPUTS folder %s'%x) + else: + Logs.error('Cannot resolve relative paths in TEXINPUTS %s'%x) + if self.type=='latex': + if'ps'in outs: + tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) + tsk.env.env=dict(os.environ) + if'pdf'in outs: + tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) + tsk.env.env=dict(os.environ) + elif self.type=='pdflatex': + if'ps'in outs: + self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) + 
self.source=[] +def configure(self): + v=self.env + for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): + try: + self.find_program(p,var=p.upper()) + except self.errors.ConfigurationError: + pass + v['DVIPSFLAGS']='-Ppdf' diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py new file mode 100644 index 00000000..68af6511 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py @@ -0,0 +1,211 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re +from waflib import Context,Task,Utils,Logs,Options,Errors,Node +from waflib.TaskGen import extension,taskgen_method +from waflib.Configure import conf +class valac(Task.Task): + vars=["VALAC","VALAC_VERSION","VALAFLAGS"] + ext_out=['.h'] + def run(self): + cmd=self.env.VALAC+self.env.VALAFLAGS + resources=getattr(self,'vala_exclude',[]) + cmd.extend([a.abspath()for a in self.inputs if a not in resources]) + ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath()) + if ret: + return ret + if self.generator.dump_deps_node: + self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) + return ret +valac=Task.update_outputs(valac) +@taskgen_method +def init_vala_task(self): + self.profile=getattr(self,'profile','gobject') + if self.profile=='gobject': + self.uselib=Utils.to_list(getattr(self,'uselib',[])) + if not'GOBJECT'in self.uselib: + self.uselib.append('GOBJECT') + def addflags(flags): + self.env.append_value('VALAFLAGS',flags) + if self.profile: + addflags('--profile=%s'%self.profile) + valatask=self.valatask + if hasattr(self,'vala_dir'): + if isinstance(self.vala_dir,str): + valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir) + try: + valatask.vala_dir_node.mkdir() + except OSError: + raise self.bld.fatal('Cannot create the vala 
dir %r'%valatask.vala_dir_node) + else: + valatask.vala_dir_node=self.vala_dir + else: + valatask.vala_dir_node=self.path.get_bld() + addflags('--directory=%s'%valatask.vala_dir_node.abspath()) + if hasattr(self,'thread'): + if self.profile=='gobject': + if not'GTHREAD'in self.uselib: + self.uselib.append('GTHREAD') + else: + Logs.warn("Profile %s means no threading support"%self.profile) + self.thread=False + if self.thread: + addflags('--thread') + self.is_lib='cprogram'not in self.features + if self.is_lib: + addflags('--library=%s'%self.target) + h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target) + valatask.outputs.append(h_node) + addflags('--header=%s'%h_node.name) + valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target)) + if getattr(self,'gir',None): + gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir) + addflags('--gir=%s'%gir_node.name) + valatask.outputs.append(gir_node) + self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) + if self.vala_target_glib: + addflags('--target-glib=%s'%self.vala_target_glib) + addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])]) + packages_private=Utils.to_list(getattr(self,'packages_private',[])) + addflags(['--pkg=%s'%x for x in packages_private]) + def _get_api_version(): + api_version='1.0' + if hasattr(Context.g_module,'API_VERSION'): + version=Context.g_module.API_VERSION.split(".") + if version[0]=="0": + api_version="0."+version[1] + else: + api_version=version[0]+".0" + return api_version + self.includes=Utils.to_list(getattr(self,'includes',[])) + self.uselib=self.to_list(getattr(self,'uselib',[])) + valatask.install_path=getattr(self,'install_path','') + valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') + valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE']) + 
valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) + valatask.install_binding=getattr(self,'install_binding',True) + self.packages=packages=Utils.to_list(getattr(self,'packages',[])) + self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) + if hasattr(self,'use'): + local_packages=Utils.to_list(self.use)[:] + seen=[] + while len(local_packages)>0: + package=local_packages.pop() + if package in seen: + continue + seen.append(package) + try: + package_obj=self.bld.get_tgen_by_name(package) + except Errors.WafError: + continue + package_name=package_obj.target + for task in package_obj.tasks: + for output in task.outputs: + if output.name==package_name+".vapi": + valatask.set_run_after(task) + if package_name not in packages: + packages.append(package_name) + if output.parent not in vapi_dirs: + vapi_dirs.append(output.parent) + if output.parent not in self.includes: + self.includes.append(output.parent) + if hasattr(package_obj,'use'): + lst=self.to_list(package_obj.use) + lst.reverse() + local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages + addflags(['--pkg=%s'%p for p in packages]) + for vapi_dir in vapi_dirs: + if isinstance(vapi_dir,Node.Node): + v_node=vapi_dir + else: + v_node=self.path.find_dir(vapi_dir) + if not v_node: + Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir) + else: + addflags('--vapidir=%s'%v_node.abspath()) + self.dump_deps_node=None + if self.is_lib and self.packages: + self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target) + valatask.outputs.append(self.dump_deps_node) + self.includes.append(self.bld.srcnode.abspath()) + self.includes.append(self.bld.bldnode.abspath()) + if self.is_lib and valatask.install_binding: + headers_list=[o for o in valatask.outputs if o.suffix()==".h"] + try: + self.install_vheader.source=headers_list + except AttributeError: + 
self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env) + vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] + try: + self.install_vapi.source=vapi_list + except AttributeError: + self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env) + gir_list=[o for o in valatask.outputs if o.suffix()=='.gir'] + try: + self.install_gir.source=gir_list + except AttributeError: + self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env) + if hasattr(self,'vala_resources'): + nodes=self.to_nodes(self.vala_resources) + valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes + valatask.inputs.extend(nodes) + for x in nodes: + addflags(['--gresources',x.abspath()]) +@extension('.vala','.gs') +def vala_file(self,node): + try: + valatask=self.valatask + except AttributeError: + valatask=self.valatask=self.create_task('valac') + self.init_vala_task() + valatask.inputs.append(node) + name=node.name[:node.name.rfind('.')]+'.c' + c_node=valatask.vala_dir_node.find_or_declare(name) + valatask.outputs.append(c_node) + self.source.append(c_node) +@conf +def find_valac(self,valac_name,min_version): + valac=self.find_program(valac_name,var='VALAC') + try: + output=self.cmd_and_log(valac+['--version']) + except Exception: + valac_version=None + else: + ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.') + valac_version=tuple([int(x)for x in ver]) + self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) + if valac and valac_version= %r"%(valac_name,valac_version,min_version)) + self.env['VALAC_VERSION']=valac_version + return valac +@conf +def check_vala(self,min_version=(0,8,0),branch=None): + if not branch: + branch=min_version[:2] + try: + find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) + except self.errors.ConfigurationError: + find_valac(self,'valac',min_version) 
+@conf +def check_vala_deps(self): + if not self.env['HAVE_GOBJECT']: + pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) + if not self.env['HAVE_GTHREAD']: + pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} + if getattr(Options.options,'vala_target_glib',None): + pkg_args['atleast_version']=Options.options.vala_target_glib + self.check_cfg(**pkg_args) +def configure(self): + self.load('gnu_dirs') + self.check_vala_deps() + self.check_vala() + self.env.VALAFLAGS=['-C'] +def options(opt): + opt.load('gnu_dirs') + valaopts=opt.add_option_group('Vala Compiler Options') + valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py new file mode 100644 index 00000000..d07e1ed8 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py @@ -0,0 +1,106 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +from waflib.TaskGen import feature,after_method,taskgen_method +from waflib import Utils,Task,Logs,Options +testlock=Utils.threading.Lock() +@feature('test') +@after_method('apply_link') +def make_test(self): + if getattr(self,'link_task',None): + self.create_task('utest',self.link_task.outputs) +@taskgen_method +def add_test_results(self,tup): + Logs.debug("ut: %r",tup) + self.utest_result=tup + try: + self.bld.utest_results.append(tup) + except AttributeError: + self.bld.utest_results=[tup] +class utest(Task.Task): + color='PINK' + after=['vnum','inst'] + vars=[] + def runnable_status(self): + if getattr(Options.options,'no_tests',False): + return Task.SKIP_ME + ret=super(utest,self).runnable_status() + if ret==Task.SKIP_ME: + if getattr(Options.options,'all_tests',False): + return Task.RUN_ME + return ret + def add_path(self,dct,path,var): + dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')]) + def get_test_env(self): + try: + fu=getattr(self.generator.bld,'all_test_paths') + except AttributeError: + fu=os.environ.copy() + lst=[] + for g in self.generator.bld.groups: + for tg in g: + if getattr(tg,'link_task',None): + s=tg.link_task.outputs[0].parent.abspath() + if s not in lst: + lst.append(s) + if Utils.is_win32: + self.add_path(fu,lst,'PATH') + elif Utils.unversioned_sys_platform()=='darwin': + self.add_path(fu,lst,'DYLD_LIBRARY_PATH') + self.add_path(fu,lst,'LD_LIBRARY_PATH') + else: + self.add_path(fu,lst,'LD_LIBRARY_PATH') + self.generator.bld.all_test_paths=fu + return fu + def run(self): + filename=self.inputs[0].abspath() + self.ut_exec=getattr(self.generator,'ut_exec',[filename]) + if getattr(self.generator,'ut_fun',None): + self.generator.ut_fun(self) + cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath() + testcmd=getattr(self.generator,'ut_cmd',False)or getattr(Options.options,'testcmd',False) + if testcmd: + 
self.ut_exec=(testcmd%self.ut_exec[0]).split(' ') + proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE) + (stdout,stderr)=proc.communicate() + self.waf_unit_test_results=tup=(filename,proc.returncode,stdout,stderr) + testlock.acquire() + try: + return self.generator.add_test_results(tup) + finally: + testlock.release() + def post_run(self): + super(utest,self).post_run() + if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]: + self.generator.bld.task_sigs[self.uid()]=None +def summary(bld): + lst=getattr(bld,'utest_results',[]) + if lst: + Logs.pprint('CYAN','execution summary') + total=len(lst) + tfail=len([x for x in lst if x[1]]) + Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total)) + for(f,code,out,err)in lst: + if not code: + Logs.pprint('CYAN',' %s'%f) + Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total)) + for(f,code,out,err)in lst: + if code: + Logs.pprint('CYAN',' %s'%f) +def set_exit_code(bld): + lst=getattr(bld,'utest_results',[]) + for(f,code,out,err)in lst: + if code: + msg=[] + if out: + msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) + if err: + msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) + bld.fatal(os.linesep.join(msg)) +def options(opt): + opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') + opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') + opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests') + opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py new file mode 100644 index 00000000..a055887b --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py @@ -0,0 +1,85 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import re,traceback +from waflib import Task,Logs,Utils +from waflib.TaskGen import extension +from waflib.Tools import c_preproc +@extension('.rc') +def rc_file(self,node): + obj_ext='.rc.o' + if self.env['WINRC_TGT_F']=='/fo': + obj_ext='.res' + rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) + try: + self.compiled_tasks.append(rctask) + except AttributeError: + self.compiled_tasks=[rctask] +re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) +class rc_parser(c_preproc.c_parser): + def filter_comments(self,filepath): + code=Utils.readf(filepath) + if c_preproc.use_trigraphs: + for(a,b)in c_preproc.trig_def:code=code.split(a).join(b) + code=c_preproc.re_nl.sub('',code) + code=c_preproc.re_cpp.sub(c_preproc.repl,code) + ret=[] + for m in re.finditer(re_lines,code): + if m.group(2): + ret.append((m.group(2),m.group(3))) + else: + ret.append(('include',m.group(5))) + return ret + def addlines(self,node): + self.currentnode_stack.append(node.parent) + filepath=node.abspath() + self.count_files+=1 + if self.count_files>c_preproc.recursion_limit: + raise c_preproc.PreprocError("recursion limit exceeded") + pc=self.parse_cache + Logs.debug('preproc: reading file %r',filepath) + try: + lns=pc[filepath] + except KeyError: + pass + else: + self.lines.extend(lns) + return + try: + lines=self.filter_comments(filepath) + lines.append((c_preproc.POPFILE,'')) + lines.reverse() + pc[filepath]=lines + 
self.lines.extend(lines) + except IOError: + raise c_preproc.PreprocError("could not read the file %s"%filepath) + except Exception: + if Logs.verbose>0: + Logs.error("parsing %s failed"%filepath) + traceback.print_exc() +class winrc(Task.Task): + run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' + color='BLUE' + def scan(self): + tmp=rc_parser(self.generator.includes_nodes) + tmp.start(self.inputs[0],self.env) + nodes=tmp.nodes + names=tmp.names + if Logs.verbose: + Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names)) + return(nodes,names) +def configure(conf): + v=conf.env + v['WINRC_TGT_F']='-o' + v['WINRC_SRC_F']='-i' + if not conf.env.WINRC: + if v.CC_NAME=='msvc': + conf.find_program('RC',var='WINRC',path_list=v['PATH']) + v['WINRC_TGT_F']='/fo' + v['WINRC_SRC_F']='' + else: + conf.find_program('windres',var='WINRC',path_list=v['PATH']) + if not conf.env.WINRC: + conf.fatal('winrc was not found!') + v['WINRCFLAGS']=[] diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py new file mode 100644 index 00000000..c56443b7 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlc(conf): + cc=conf.find_program(['xlc_r','xlc'],var='CC') + conf.get_xlc_version(cc) + conf.env.CC_NAME='xlc' +@conf +def xlc_common_flags(conf): + v=conf.env + v['CC_SRC_F']=[] + v['CC_TGT_F']=['-c','-o'] + if not v['LINK_CC']:v['LINK_CC']=v['CC'] + v['CCLNK_SRC_F']=[] + v['CCLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cprogram']=['-Wl,-brtl'] + v['cprogram_PATTERN']='%s' + v['CFLAGS_cshlib']=['-fPIC'] + v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cstlib']=[] + v['cstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlc() + conf.find_ar() + conf.xlc_common_flags() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py new file mode 100644 index 00000000..f348bbfd --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py @@ -0,0 +1,43 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +from waflib.Tools import ccroot,ar +from waflib.Configure import conf +@conf +def find_xlcxx(conf): + cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') + conf.get_xlc_version(cxx) + conf.env.CXX_NAME='xlc++' +@conf +def xlcxx_common_flags(conf): + v=conf.env + v['CXX_SRC_F']=[] + v['CXX_TGT_F']=['-c','-o'] + if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] + v['CXXLNK_SRC_F']=[] + v['CXXLNK_TGT_F']=['-o'] + v['CPPPATH_ST']='-I%s' + v['DEFINES_ST']='-D%s' + v['LIB_ST']='-l%s' + v['LIBPATH_ST']='-L%s' + v['STLIB_ST']='-l%s' + v['STLIBPATH_ST']='-L%s' + v['RPATH_ST']='-Wl,-rpath,%s' + v['SONAME_ST']=[] + v['SHLIB_MARKER']=[] + v['STLIB_MARKER']=[] + v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] + v['cxxprogram_PATTERN']='%s' + v['CXXFLAGS_cxxshlib']=['-fPIC'] + v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull'] + v['cxxshlib_PATTERN']='lib%s.so' + v['LINKFLAGS_cxxstlib']=[] + v['cxxstlib_PATTERN']='lib%s.a' +def configure(conf): + conf.find_xlcxx() + conf.find_ar() + conf.xlcxx_common_flags() + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py new file mode 100644 index 00000000..b8706eca --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py @@ -0,0 +1,468 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,sys,errno,traceback,inspect,re,shutil,datetime,gc,platform +import subprocess +from collections import deque,defaultdict +try: + import _winreg as winreg +except ImportError: + try: + import winreg + except ImportError: + winreg=None +from waflib import Errors +try: + from collections import UserDict +except ImportError: + from UserDict import UserDict +try: + from hashlib import md5 +except ImportError: + try: + from md5 import md5 + except ImportError: + pass +try: + import threading +except ImportError: + if not'JOBS'in os.environ: + os.environ['JOBS']='1' + class threading(object): + pass + class Lock(object): + def acquire(self): + pass + def release(self): + pass + threading.Lock=threading.Thread=Lock +else: + run_old=threading.Thread.run + def run(*args,**kwargs): + try: + run_old(*args,**kwargs) + except(KeyboardInterrupt,SystemExit): + raise + except Exception: + sys.excepthook(*sys.exc_info()) + threading.Thread.run=run +SIG_NIL='iluvcuteoverload'.encode() +O644=420 +O755=493 +rot_chr=['\\','|','/','-'] +rot_idx=0 +try: + from collections import OrderedDict as ordered_iter_dict +except ImportError: + class ordered_iter_dict(dict): + def __init__(self,*k,**kw): + self.lst=[] + dict.__init__(self,*k,**kw) + def clear(self): + dict.clear(self) + self.lst=[] + def __setitem__(self,key,value): + dict.__setitem__(self,key,value) + try: + self.lst.remove(key) + except ValueError: + pass + self.lst.append(key) + def __delitem__(self,key): + dict.__delitem__(self,key) + try: + self.lst.remove(key) + except ValueError: + pass + def __iter__(self): + for x in self.lst: + yield x + def keys(self): + return self.lst +is_win32=os.sep=='\\'or sys.platform=='win32' +def readf(fname,m='r',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + if encoding: + txt=txt.decode(encoding) + else: + txt=txt.decode() + else: + 
f=open(fname,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef(fname,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + f=open(fname,m) + try: + f.write(data) + finally: + f.close() +def h_file(fname): + f=open(fname,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +def readf_win32(f,m='r',encoding='ISO8859-1'): + flags=os.O_NOINHERIT|os.O_RDONLY + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot read from %r'%f) + if sys.hexversion>0x3000000 and not'b'in m: + m+='b' + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + if encoding: + txt=txt.decode(encoding) + else: + txt=txt.decode() + else: + f=os.fdopen(fd,m) + try: + txt=f.read() + finally: + f.close() + return txt +def writef_win32(f,data,m='w',encoding='ISO8859-1'): + if sys.hexversion>0x3000000 and not'b'in m: + data=data.encode(encoding) + m+='b' + flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT + if'b'in m: + flags|=os.O_BINARY + if'+'in m: + flags|=os.O_RDWR + try: + fd=os.open(f,flags) + except OSError: + raise IOError('Cannot write to %r'%f) + f=os.fdopen(fd,m) + try: + f.write(data) + finally: + f.close() +def h_file_win32(fname): + try: + fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) + except OSError: + raise IOError('Cannot read from %r'%fname) + f=os.fdopen(fd,'rb') + m=md5() + try: + while fname: + fname=f.read(200000) + m.update(fname) + finally: + f.close() + return m.digest() +readf_unix=readf +writef_unix=writef +h_file_unix=h_file +if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: + readf=readf_win32 + writef=writef_win32 + h_file=h_file_win32 +try: + x=''.encode('hex') +except LookupError: + import binascii + def to_hex(s): + ret=binascii.hexlify(s) + if not isinstance(ret,str): + ret=ret.decode('utf-8') + 
return ret +else: + def to_hex(s): + return s.encode('hex') +to_hex.__doc__=""" +Return the hexadecimal representation of a string + +:param s: string to convert +:type s: string +""" +def listdir_win32(s): + if not s: + try: + import ctypes + except ImportError: + return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] + else: + dlen=4 + maxdrives=26 + buf=ctypes.create_string_buffer(maxdrives*dlen) + ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) + return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] + if len(s)==2 and s[1]==":": + s+=os.sep + if not os.path.isdir(s): + e=OSError('%s is not a directory'%s) + e.errno=errno.ENOENT + raise e + return os.listdir(s) +listdir=os.listdir +if is_win32: + listdir=listdir_win32 +def num2ver(ver): + if isinstance(ver,str): + ver=tuple(ver.split('.')) + if isinstance(ver,tuple): + ret=0 + for i in range(4): + if i0x3000000: + ret=ret.encode('iso8859-1','xmlcharrefreplace') + return ret +reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") +def subst_vars(expr,params): + def repl_var(m): + if m.group(1): + return'\\' + if m.group(2): + return'$' + try: + return params.get_flat(m.group(3)) + except AttributeError: + return params[m.group(3)] + return reg_subst.sub(repl_var,expr) +def destos_to_binfmt(key): + if key=='darwin': + return'mac-o' + elif key in('win32','cygwin','uwin','msys'): + return'pe' + return'elf' +def unversioned_sys_platform(): + s=sys.platform + if s.startswith('java'): + from java.lang import System + s=System.getProperty('os.name') + if s=='Mac OS X': + return'darwin' + elif s.startswith('Windows '): + return'win32' + elif s=='OS/2': + return'os2' + elif s=='HP-UX': + return'hp-ux' + elif s in('SunOS','Solaris'): + return'sunos' + else:s=s.lower() + if s=='powerpc': + return'darwin' + if s=='win32'or s=='os2': + return s + if s=='cli'and os.name=='nt': + return'win32' + return re.split('\d+$',s)[0] +def nada(*k,**kw): + pass +class 
Timer(object): + def __init__(self): + self.start_time=datetime.datetime.utcnow() + def __str__(self): + delta=datetime.datetime.utcnow()-self.start_time + days=delta.days + hours,rem=divmod(delta.seconds,3600) + minutes,seconds=divmod(rem,60) + seconds+=delta.microseconds*1e-6 + result='' + if days: + result+='%dd'%days + if days or hours: + result+='%dh'%hours + if days or hours or minutes: + result+='%dm'%minutes + return'%s%.3fs'%(result,seconds) +if is_win32: + old=shutil.copy2 + def copy2(src,dst): + old(src,dst) + shutil.copystat(src,dst) + setattr(shutil,'copy2',copy2) +if os.name=='java': + try: + gc.disable() + gc.enable() + except NotImplementedError: + gc.disable=gc.enable +def read_la_file(path): + sp=re.compile(r'^([^=]+)=\'(.*)\'$') + dc={} + for line in readf(path).splitlines(): + try: + _,left,right,_=sp.split(line.strip()) + dc[left]=right + except ValueError: + pass + return dc +def nogc(fun): + def f(*k,**kw): + try: + gc.disable() + ret=fun(*k,**kw) + finally: + gc.enable() + return ret + f.__doc__=fun.__doc__ + return f +def run_once(fun): + cache={} + def wrap(k): + try: + return cache[k] + except KeyError: + ret=fun(k) + cache[k]=ret + return ret + wrap.__cache__=cache + wrap.__name__=fun.__name__ + return wrap +def get_registry_app_path(key,filename): + if not winreg: + return None + try: + result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) + except WindowsError: + pass + else: + if os.path.isfile(result): + return result +def lib64(): + if os.sep=='/': + if platform.architecture()[0]=='64bit': + if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'): + return'64' + return'' +def sane_path(p): + return os.path.abspath(os.path.expanduser(p)) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py new file mode 100644 index 00000000..55e850d6 --- /dev/null +++ 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py new file mode 100644 index 00000000..1d8bc78f --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py @@ -0,0 +1,238 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os,re,sys +from waflib import Utils +wlock=Utils.threading.Lock() +try: + from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long +except ImportError: + class AnsiTerm(object): + def __init__(self,stream): + self.stream=stream + try: + self.errors=self.stream.errors + except AttributeError: + pass + self.encoding=self.stream.encoding + def write(self,txt): + try: + wlock.acquire() + self.stream.write(txt) + self.stream.flush() + finally: + wlock.release() + def fileno(self): + return self.stream.fileno() + def flush(self): + self.stream.flush() + def isatty(self): + return self.stream.isatty() +else: + class COORD(Structure): + _fields_=[("X",c_short),("Y",c_short)] + class SMALL_RECT(Structure): + _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] + class CONSOLE_CURSOR_INFO(Structure): + _fields_=[('dwSize',c_ulong),('bVisible',c_int)] + try: + _type=unicode + except NameError: + _type=str + to_int=lambda number,default:number and int(number)or default + STD_OUTPUT_HANDLE=-11 + STD_ERROR_HANDLE=-12 + windll.kernel32.GetStdHandle.argtypes=[c_ulong] + windll.kernel32.GetStdHandle.restype=c_ulong + 
windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long + windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] + windll.kernel32.SetConsoleTextAttribute.restype=c_long + windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputCharacterW.restype=c_long + windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] + windll.kernel32.FillConsoleOutputAttribute.restype=c_long + windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] + windll.kernel32.SetConsoleCursorPosition.restype=c_long + windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] + windll.kernel32.SetConsoleCursorInfo.restype=c_long + class AnsiTerm(object): + def __init__(self,s): + self.stream=s + try: + self.errors=s.errors + except AttributeError: + pass + self.encoding=s.encoding + self.cursor_history=[] + handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE + self.hconsole=windll.kernel32.GetStdHandle(handle) + self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + self._csinfo=CONSOLE_CURSOR_INFO() + windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo)) + self._isatty=r==1 + def screen_buffer_info(self): + windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo)) + return self._sbinfo + def clear_line(self,param): + mode=param and int(param)or 0 + sbinfo=self.screen_buffer_info() + if mode==1: + line_start=COORD(0,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X + elif mode==2: + line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + else: + 
line_start=sbinfo.CursorPosition + line_length=sbinfo.Size.X-sbinfo.CursorPosition.X + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) + def clear_screen(self,param): + mode=to_int(param,0) + sbinfo=self.screen_buffer_info() + if mode==1: + clear_start=COORD(0,0) + clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y + elif mode==2: + clear_start=COORD(0,0) + clear_length=sbinfo.Size.X*sbinfo.Size.Y + windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) + else: + clear_start=sbinfo.CursorPosition + clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) + chars_written=c_ulong() + windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) + windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) + def push_cursor(self,param): + sbinfo=self.screen_buffer_info() + self.cursor_history.append(sbinfo.CursorPosition) + def pop_cursor(self,param): + if self.cursor_history: + old_pos=self.cursor_history.pop() + windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) + def set_cursor(self,param): + y,sep,x=param.partition(';') + x=to_int(x,1)-1 + y=to_int(y,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def set_column(self,param): + x=to_int(param,1)-1 + sbinfo=self.screen_buffer_info() + new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_cursor(self,x_offset=0,y_offset=0): + sbinfo=self.screen_buffer_info() + 
new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) + windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) + def move_up(self,param): + self.move_cursor(y_offset=-to_int(param,1)) + def move_down(self,param): + self.move_cursor(y_offset=to_int(param,1)) + def move_left(self,param): + self.move_cursor(x_offset=-to_int(param,1)) + def move_right(self,param): + self.move_cursor(x_offset=to_int(param,1)) + def next_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) + def prev_line(self,param): + sbinfo=self.screen_buffer_info() + self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) + def rgb2bgr(self,c): + return((c&1)<<2)|(c&2)|((c&4)>>2) + def set_color(self,param): + cols=param.split(';') + sbinfo=self.screen_buffer_info() + attr=sbinfo.Attributes + for c in cols: + c=to_int(c,0) + if 29>4)|((attr&0x07)<<4) + windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) + def show_cursor(self,param): + self._csinfo.bVisible=1 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + def hide_cursor(self,param): + self._csinfo.bVisible=0 + windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) + ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} + ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') + def write(self,text): + try: + wlock.acquire() + if self._isatty: + for param,cmd,txt in self.ansi_tokens.findall(text): + if cmd: + cmd_func=self.ansi_command_table.get(cmd) + if cmd_func: + cmd_func(self,param) + else: + self.writeconsole(txt) + else: + self.stream.write(text) + finally: + wlock.release() + def 
writeconsole(self,txt): + chars_written=c_ulong() + writeconsole=windll.kernel32.WriteConsoleA + if isinstance(txt,_type): + writeconsole=windll.kernel32.WriteConsoleW + done=0 + todo=len(txt) + chunk=32<<10 + while todo!=0: + doing=min(chunk,todo) + buf=txt[done:done+doing] + r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None) + if r==0: + chunk>>=1 + continue + done+=doing + todo-=doing + def fileno(self): + return self.stream.fileno() + def flush(self): + pass + def isatty(self): + return self._isatty + if sys.stdout.isatty()or sys.stderr.isatty(): + handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE + console=windll.kernel32.GetStdHandle(handle) + sbinfo=CONSOLE_SCREEN_BUFFER_INFO() + def get_term_cols(): + windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo)) + return sbinfo.Size.X-1 +try: + import struct,fcntl,termios +except ImportError: + pass +else: + if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'): + FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno() + def fun(): + return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1] + try: + fun() + except Exception as e: + pass + else: + get_term_cols=fun diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py new file mode 100644 index 00000000..55e850d6 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py @@ -0,0 +1,4 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file + diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py new file mode 100644 index 00000000..fd541452 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py @@ -0,0 +1,301 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import sys +from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context +sys.modules['Environment']=ConfigSet +ConfigSet.Environment=ConfigSet.ConfigSet +sys.modules['Logs']=Logs +sys.modules['Options']=Options +sys.modules['Scripting']=Scripting +sys.modules['Task']=Task +sys.modules['Build']=Build +sys.modules['Configure']=Configure +sys.modules['Node']=Node +sys.modules['Runner']=Runner +sys.modules['TaskGen']=TaskGen +sys.modules['Utils']=Utils +sys.modules['Constants']=Context +Context.SRCDIR='' +Context.BLDDIR='' +from waflib.Tools import c_preproc +sys.modules['preproc']=c_preproc +from waflib.Tools import c_config +sys.modules['config_c']=c_config +ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive +ConfigSet.ConfigSet.set_variant=Utils.nada +Utils.pproc=Utils.subprocess +Build.BuildContext.add_subdirs=Build.BuildContext.recurse +Build.BuildContext.new_task_gen=Build.BuildContext.__call__ +Build.BuildContext.is_install=0 +Node.Node.relpath_gen=Node.Node.path_from +Utils.pproc=Utils.subprocess +Utils.get_term_cols=Logs.get_term_cols +def cmd_output(cmd,**kw): + silent=False + if'silent'in kw: + silent=kw['silent'] + del(kw['silent']) + if'e'in kw: + tmp=kw['e'] + del(kw['e']) + kw['env']=tmp + kw['shell']=isinstance(cmd,str) + kw['stdout']=Utils.subprocess.PIPE + if silent: + kw['stderr']=Utils.subprocess.PIPE + try: + p=Utils.subprocess.Popen(cmd,**kw) + output=p.communicate()[0] + except OSError as e: + raise 
ValueError(str(e)) + if p.returncode: + if not silent: + msg="command execution failed: %s -> %r"%(cmd,str(output)) + raise ValueError(msg) + output='' + return output +Utils.cmd_output=cmd_output +def name_to_obj(self,s,env=None): + if Logs.verbose: + Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"') + return self.get_tgen_by_name(s) +Build.BuildContext.name_to_obj=name_to_obj +def env_of_name(self,name): + try: + return self.all_envs[name] + except KeyError: + Logs.error('no such environment: '+name) + return None +Build.BuildContext.env_of_name=env_of_name +def set_env_name(self,name,env): + self.all_envs[name]=env + return env +Configure.ConfigurationContext.set_env_name=set_env_name +def retrieve(self,name,fromenv=None): + try: + env=self.all_envs[name] + except KeyError: + env=ConfigSet.ConfigSet() + self.prepare_env(env) + self.all_envs[name]=env + else: + if fromenv: + Logs.warn("The environment %s may have been configured already"%name) + return env +Configure.ConfigurationContext.retrieve=retrieve +Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse +Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load +Configure.conftest=Configure.conf +Configure.ConfigurationError=Errors.ConfigurationError +Utils.WafError=Errors.WafError +Options.OptionsContext.sub_options=Options.OptionsContext.recurse +Options.OptionsContext.tool_options=Context.Context.load +Options.Handler=Options.OptionsContext +Task.simple_task_type=Task.task_type_from_func=Task.task_factory +Task.TaskBase.classes=Task.classes +def setitem(self,key,value): + if key.startswith('CCFLAGS'): + key=key[1:] + self.table[key]=value +ConfigSet.ConfigSet.__setitem__=setitem +@TaskGen.feature('d') +@TaskGen.before('apply_incpaths') +def old_importpaths(self): + if getattr(self,'importpaths',[]): + self.includes=self.importpaths +from waflib import Context +eld=Context.load_tool +def load_tool(*k,**kw): + ret=eld(*k,**kw) + 
if'set_options'in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "set_options" to options') + ret.options=ret.set_options + if'detect'in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "detect" to "configure"') + ret.configure=ret.detect + return ret +Context.load_tool=load_tool +def get_curdir(self): + return self.path.abspath() +Context.Context.curdir=property(get_curdir,Utils.nada) +def get_srcdir(self): + return self.srcnode.abspath() +Configure.ConfigurationContext.srcdir=property(get_srcdir,Utils.nada) +def get_blddir(self): + return self.bldnode.abspath() +Configure.ConfigurationContext.blddir=property(get_blddir,Utils.nada) +Configure.ConfigurationContext.check_message_1=Configure.ConfigurationContext.start_msg +Configure.ConfigurationContext.check_message_2=Configure.ConfigurationContext.end_msg +rev=Context.load_module +def load_module(path,encoding=None): + ret=rev(path,encoding) + if'set_options'in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "set_options" to "options" (%r)'%path) + ret.options=ret.set_options + if'srcdir'in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "srcdir" to "top" (%r)'%path) + ret.top=ret.srcdir + if'blddir'in ret.__dict__: + if Logs.verbose: + Logs.warn('compat: rename "blddir" to "out" (%r)'%path) + ret.out=ret.blddir + Utils.g_module=Context.g_module + Options.launch_dir=Context.launch_dir + return ret +Context.load_module=load_module +old_post=TaskGen.task_gen.post +def post(self): + self.features=self.to_list(self.features) + if'cc'in self.features: + if Logs.verbose: + Logs.warn('compat: the feature cc does not exist anymore (use "c")') + self.features.remove('cc') + self.features.append('c') + if'cstaticlib'in self.features: + if Logs.verbose: + Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")') + self.features.remove('cstaticlib') + self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib') + if 
getattr(self,'ccflags',None): + if Logs.verbose: + Logs.warn('compat: "ccflags" was renamed to "cflags"') + self.cflags=self.ccflags + return old_post(self) +TaskGen.task_gen.post=post +def waf_version(*k,**kw): + Logs.warn('wrong version (waf_version was removed in waf 1.6)') +Utils.waf_version=waf_version +import os +@TaskGen.feature('c','cxx','d') +@TaskGen.before('apply_incpaths','propagate_uselib_vars') +@TaskGen.after('apply_link','process_source') +def apply_uselib_local(self): + env=self.env + from waflib.Tools.ccroot import stlink_task + self.uselib=self.to_list(getattr(self,'uselib',[])) + self.includes=self.to_list(getattr(self,'includes',[])) + names=self.to_list(getattr(self,'uselib_local',[])) + get=self.bld.get_tgen_by_name + seen=set([]) + seen_uselib=set([]) + tmp=Utils.deque(names) + if tmp: + if Logs.verbose: + Logs.warn('compat: "uselib_local" is deprecated, replace by "use"') + while tmp: + lib_name=tmp.popleft() + if lib_name in seen: + continue + y=get(lib_name) + y.post() + seen.add(lib_name) + if getattr(y,'uselib_local',None): + for x in self.to_list(getattr(y,'uselib_local',[])): + obj=get(x) + obj.post() + if getattr(obj,'link_task',None): + if not isinstance(obj.link_task,stlink_task): + tmp.append(x) + if getattr(y,'link_task',None): + link_name=y.target[y.target.rfind(os.sep)+1:] + if isinstance(y.link_task,stlink_task): + env.append_value('STLIB',[link_name]) + else: + env.append_value('LIB',[link_name]) + self.link_task.set_run_after(y.link_task) + self.link_task.dep_nodes+=y.link_task.outputs + tmp_path=y.link_task.outputs[0].parent.bldpath() + if not tmp_path in env['LIBPATH']: + env.prepend_value('LIBPATH',[tmp_path]) + for v in self.to_list(getattr(y,'uselib',[])): + if v not in seen_uselib: + seen_uselib.add(v) + if not env['STLIB_'+v]: + if not v in self.uselib: + self.uselib.insert(0,v) + if getattr(y,'export_includes',None): + self.includes.extend(y.to_incnodes(y.export_includes)) 
+@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib') +@TaskGen.after('apply_link') +def apply_objdeps(self): + names=getattr(self,'add_objects',[]) + if not names: + return + names=self.to_list(names) + get=self.bld.get_tgen_by_name + seen=[] + while names: + x=names[0] + if x in seen: + names=names[1:] + continue + y=get(x) + if getattr(y,'add_objects',None): + added=0 + lst=y.to_list(y.add_objects) + lst.reverse() + for u in lst: + if u in seen:continue + added=1 + names=[u]+names + if added:continue + y.post() + seen.append(x) + for t in getattr(y,'compiled_tasks',[]): + self.link_task.inputs.extend(t.outputs) +@TaskGen.after('apply_link') +def process_obj_files(self): + if not hasattr(self,'obj_files'): + return + for x in self.obj_files: + node=self.path.find_resource(x) + self.link_task.inputs.append(node) +@TaskGen.taskgen_method +def add_obj_file(self,file): + if not hasattr(self,'obj_files'):self.obj_files=[] + if not'process_obj_files'in self.meths:self.meths.append('process_obj_files') + self.obj_files.append(file) +old_define=Configure.ConfigurationContext.__dict__['define'] +@Configure.conf +def define(self,key,val,quote=True,comment=''): + old_define(self,key,val,quote,comment) + if key.startswith('HAVE_'): + self.env[key]=1 +old_undefine=Configure.ConfigurationContext.__dict__['undefine'] +@Configure.conf +def undefine(self,key,comment=''): + old_undefine(self,key,comment) + if key.startswith('HAVE_'): + self.env[key]=0 +def set_incdirs(self,val): + Logs.warn('compat: change "export_incdirs" by "export_includes"') + self.export_includes=val +TaskGen.task_gen.export_incdirs=property(None,set_incdirs) +def install_dir(self,path): + if not path: + return[] + destpath=Utils.subst_vars(path,self.env) + if self.is_install>0: + Logs.info('* creating %s'%destpath) + Utils.check_dir(destpath) + elif self.is_install<0: + Logs.info('* removing %s'%destpath) + try: + os.remove(destpath) + except OSError: 
+ pass +Build.BuildContext.install_dir=install_dir +repl={'apply_core':'process_source','apply_lib_vars':'process_source','apply_obj_vars':'propagate_uselib_vars','exec_rule':'process_rule'} +def after(*k): + k=[repl.get(key,key)for key in k] + return TaskGen.after_method(*k) +def before(*k): + k=[repl.get(key,key)for key in k] + return TaskGen.before_method(*k) +TaskGen.before=before diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py new file mode 100644 index 00000000..1721c353 --- /dev/null +++ b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py @@ -0,0 +1,53 @@ +#! /usr/bin/env python +# encoding: utf-8 +# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file + +import os +all_modifs={} +def fixdir(dir): + global all_modifs + for k in all_modifs: + for v in all_modifs[k]: + modif(os.path.join(dir,'waflib'),k,v) +def modif(dir,name,fun): + if name=='*': + lst=[] + for y in'. 
Tools extras'.split(): + for x in os.listdir(os.path.join(dir,y)): + if x.endswith('.py'): + lst.append(y+os.sep+x) + for x in lst: + modif(dir,x,fun) + return + filename=os.path.join(dir,name) + f=open(filename,'r') + try: + txt=f.read() + finally: + f.close() + txt=fun(txt) + f=open(filename,'w') + try: + f.write(txt) + finally: + f.close() +def subst(*k): + def do_subst(fun): + global all_modifs + for x in k: + try: + all_modifs[x].append(fun) + except KeyError: + all_modifs[x]=[fun] + return fun + return do_subst +@subst('*') +def r1(code): + code=code.replace('as e:',',e:') + code=code.replace(".decode(sys.stdout.encoding or 'iso8859-1')",'') + code=code.replace('.encode()','') + return code +@subst('Runner.py') +def r4(code): + code=code.replace('next(self.biter)','self.biter.next()') + return code diff --git a/src/dynamixel/controllers/usb2dynamixel.hpp b/src/dynamixel/controllers/usb2dynamixel.hpp index 73a3197f..ed3b6e54 100644 --- a/src/dynamixel/controllers/usb2dynamixel.hpp +++ b/src/dynamixel/controllers/usb2dynamixel.hpp @@ -147,10 +147,10 @@ namespace dynamixel { const int ret = write(_fd, packet.data(), packet.size()); - std::cout << "Send: "; - for (size_t i = 0; i < packet.size(); ++i) - std::cout << "0x" << std::setfill('0') << std::setw(2) << std::hex << (unsigned int)packet[i] << " "; - std::cout << std::endl; + // std::cout << "Send: "; + // for (size_t i = 0; i < packet.size(); ++i) + // std::cout << "0x" << std::setfill('0') << std::setw(2) << std::hex << (unsigned int)packet[i] << " "; + // std::cout << std::endl; if (ret == -1) { throw errors::Error("Usb2Dynamixel::Send write error " + write_error_string(errno)); @@ -178,23 +178,23 @@ namespace dynamixel { std::vector packet; packet.reserve(_recv_buffer_size); - std::cout << "Receive:" << std::endl; + // std::cout << "Receive:" << std::endl; do { double current_time = get_time(); uint8_t byte; int res = read(_fd, &byte, 1); if (res > 0) { - std::cout << std::setfill('0') << std::setw(2) - 
<< std::hex << (unsigned int)byte << " "; + // std::cout << std::setfill('0') << std::setw(2) + // << std::hex << (unsigned int)byte << " "; packet.push_back(byte); state = status.decode_packet(packet, _report_bad_packet); if (state == DecodeState::INVALID) { - std::cout << "\tBad packet: "; - for (const auto byte : packet) - std::cout << std::setfill('0') << std::setw(2) << std::hex << (unsigned int)byte << " "; - std::cout << std::endl; + // std::cout << "\tBad packet: "; + // for (const auto byte : packet) + // std::cout << std::setfill('0') << std::setw(2) << std::hex << (unsigned int)byte << " "; + // std::cout << std::endl; packet.clear(); } @@ -206,8 +206,8 @@ namespace dynamixel { return false; } while (state != DecodeState::DONE); - std::cout << std::endl; - std::cout << std::dec; + // std::cout << std::endl; + // std::cout << std::dec; return true; } diff --git a/src/dynamixel/instructions/bulk_read.hpp b/src/dynamixel/instructions/bulk_read.hpp index 85a5087f..8891bfd9 100644 --- a/src/dynamixel/instructions/bulk_read.hpp +++ b/src/dynamixel/instructions/bulk_read.hpp @@ -12,16 +12,27 @@ namespace dynamixel { template class BulkRead : public InstructionPacket { public: - BulkRead(const std::vector& address, const std::vector& ids, + BulkRead(const std::vector& ids, const std::vector& address, const std::vector& data_length) - : InstructionPacket(T::broadcast_id, T::Instructions::bulk_read, _get_parameters(address, ids, data_length)) {} - // const std::vector& address + : InstructionPacket(T::broadcast_id, T::Instructions::bulk_read, _get_parameters(ids, address, data_length)) {} + + BulkRead(const std::vector& ids, const typename T::address_t& address, + const uint8_t& data_length) + : InstructionPacket(T::broadcast_id, T::Instructions::bulk_read, _get_parameters(ids, address, data_length)) {} + + BulkRead(const std::vector& ids, const std::vector& address, + const std::vector& data_length) + : InstructionPacket(T::broadcast_id, 
T::Instructions::bulk_read, _get_parameters(ids, address, data_length)) {} + + BulkRead(const std::vector& ids, const typename T::address_t& address, + const uint16_t& data_length) + : InstructionPacket(T::broadcast_id, T::Instructions::bulk_read, _get_parameters(ids, address, data_length)) {} + protected: - std::vector _get_parameters(const std::vector& address, const std::vector& ids, + std::vector _get_parameters(const std::vector& ids, const std::vector& address, const std::vector& data_length) { - if (ids.size() == 0) - throw errors::Error("BulkRead: ids vector of size zero"); + _check_arguments(ids, address, data_length); std::vector parameters(3 * ids.size() + 3); @@ -39,8 +50,8 @@ namespace dynamixel { return parameters; } - std::vector _get_parameters(const std::vector& address, const std::vector& ids, - const std::vector& data_length) + std::vector _get_parameters(const std::vector& ids, const uint8_t& address, + const uint8_t& data_length) { if (ids.size() == 0) throw errors::Error("BulkRead: ids vector of size zero"); @@ -48,17 +59,69 @@ namespace dynamixel { std::vector parameters(3 * ids.size() + 3); parameters[0] = 0x00; + + size_t curr = 1; + + for (size_t i = 0; i < ids.size(); i++) { + + parameters[curr++] = data_length; + parameters[curr++] = ids[i]; + parameters[curr++] = address; + } + + return parameters; + } + + std::vector _get_parameters(const std::vector& ids, const std::vector& address, + const std::vector& data_length) + { + _check_arguments(ids, address, data_length); + + std::vector parameters(5 * ids.size() + 5); + size_t curr = 0; + for (size_t m = 0; m < ids.size(); m++) { - parameters[m + 1] = (uint8_t)(data_length[m] & 0xFF); - parameters[m + 2] = (uint8_t)(data_length[m] >> 8) & 0xFF; - parameters[m + 3] = (uint8_t)(ids[m] & 0xFF); - parameters[m + 4] = (uint8_t)(ids[m] >> 8) & 0xFF; - parameters[m + 5] = (uint8_t)(address[m] & 0xFF); - parameters[m + 6] = (uint8_t)(address[m] >> 8) & 0xFF; + parameters[curr++] = ids[m]; + 
parameters[curr++] = (uint8_t)(address[m] & 0xFF); + parameters[curr++] = (uint8_t)(address[m] >> 8) & 0xFF; + parameters[curr++] = (uint8_t)(data_length[m] & 0xFF); + parameters[curr++] = (uint8_t)(data_length[m] >> 8) & 0xFF; } return parameters; } + + std::vector _get_parameters(const std::vector& ids, const uint16_t& address, + const uint16_t& data_length) + { + if (ids.size() == 0) + throw errors::Error("BulkRead: ids vector of size zero"); + + std::vector parameters(5 * ids.size() + 5); + size_t curr = 0; + + for (size_t m = 0; m < ids.size(); m++) { + parameters[curr++] = ids[m]; + parameters[curr++] = (uint8_t)(address & 0xFF); + parameters[curr++] = (uint8_t)(address >> 8) & 0xFF; + parameters[curr++] = (uint8_t)(data_length & 0xFF); + parameters[curr++] = (uint8_t)(data_length >> 8) & 0xFF; + } + + return parameters; + } + + template + void _check_arguments(std::vector ids, std::vector addresses, + std::vector lengths) + { + if (ids.size() == 0) + throw errors::Error("SyncWrite: ids vector of size zero"); + if (ids.size() != addresses.size()) + throw errors::Error("BulkRead: mismatching size for ids and addresses"); + if (ids.size() != lengths.size()) + throw errors::Error("BulkRead: mismatching size for ids and lengths/sizes"); + } }; } // namespace instructions } // namespace dynamixel diff --git a/src/dynamixel/instructions/sync_write.hpp b/src/dynamixel/instructions/sync_write.hpp index 2f7044cd..187e293c 100644 --- a/src/dynamixel/instructions/sync_write.hpp +++ b/src/dynamixel/instructions/sync_write.hpp @@ -1,11 +1,11 @@ #ifndef DYNAMIXEL_INSTRUCTIONS_SYNC_WRITE_HPP_ #define DYNAMIXEL_INSTRUCTIONS_SYNC_WRITE_HPP_ -#include #include +#include -#include "../instruction_packet.hpp" #include "../errors/error.hpp" +#include "../instruction_packet.hpp" namespace dynamixel { namespace instructions { @@ -77,7 +77,7 @@ namespace dynamixel { return parameters; } }; - } -} + } // namespace instructions +} // namespace dynamixel #endif diff --git 
a/src/tools/utility.hpp b/src/tools/utility.hpp index 64ec0814..fd485320 100644 --- a/src/tools/utility.hpp +++ b/src/tools/utility.hpp @@ -434,7 +434,7 @@ namespace dynamixel { "vectors of IDs and angles should have " "the same length"); _serial_interface.send( - std::make_shared(0)->set_goal_positions(ids, angles)); + std::make_shared(0)->set_goal_positions(ids, angles)); //Mx28 StatusPacket status; for (int i = 0; i < ids.size(); i++) { @@ -529,9 +529,12 @@ namespace dynamixel { ids.push_back(servo.first); } - _serial_interface.send( - std::make_shared(0)->get_current_positions(ids)); - + for (auto servo : _servos) { + _serial_interface.send( + std::make_shared(0)->get_current_positions_XL(ids)); + // servo.second->get_current_positions_all(ids)); + break; + } StatusPacket status; for (auto servo : _servos) { From f2f33c06b16839a80534644d94a82e80a04f3768 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 19 Mar 2019 16:17:10 +0100 Subject: [PATCH 05/22] .waf delete --- .../waflib/Build.py | 759 ---------------- .../waflib/ConfigSet.py | 153 ---- .../waflib/Configure.py | 379 -------- .../waflib/Context.py | 394 --------- .../waflib/Errors.py | 37 - .../waflib/Logs.py | 200 ----- .../waflib/Node.py | 491 ----------- .../waflib/Options.py | 147 ---- .../waflib/Runner.py | 207 ----- .../waflib/Scripting.py | 407 --------- .../waflib/Task.py | 686 --------------- .../waflib/TaskGen.py | 433 ---------- .../waflib/Tools/__init__.py | 4 - .../waflib/Tools/ar.py | 13 - .../waflib/Tools/asm.py | 24 - .../waflib/Tools/bison.py | 28 - .../waflib/Tools/c.py | 26 - .../waflib/Tools/c_aliases.py | 63 -- .../waflib/Tools/c_config.py | 758 ---------------- .../waflib/Tools/c_osx.py | 137 --- .../waflib/Tools/c_preproc.py | 611 ------------- .../waflib/Tools/c_tests.py | 152 ---- .../waflib/Tools/ccroot.py | 447 ---------- .../waflib/Tools/clang.py | 20 - .../waflib/Tools/clangxx.py | 20 - .../waflib/Tools/compiler_c.py | 40 - .../waflib/Tools/compiler_cxx.py | 40 - 
.../waflib/Tools/compiler_d.py | 37 - .../waflib/Tools/compiler_fc.py | 39 - .../waflib/Tools/cs.py | 132 --- .../waflib/Tools/cxx.py | 26 - .../waflib/Tools/d.py | 54 -- .../waflib/Tools/d_config.py | 52 -- .../waflib/Tools/d_scan.py | 133 --- .../waflib/Tools/dbus.py | 29 - .../waflib/Tools/dmd.py | 51 -- .../waflib/Tools/errcheck.py | 163 ---- .../waflib/Tools/fc.py | 115 --- .../waflib/Tools/fc_config.py | 286 ------- .../waflib/Tools/fc_scan.py | 64 -- .../waflib/Tools/flex.py | 32 - .../waflib/Tools/g95.py | 54 -- .../waflib/Tools/gas.py | 12 - .../waflib/Tools/gcc.py | 102 --- .../waflib/Tools/gdc.py | 35 - .../waflib/Tools/gfortran.py | 68 -- .../waflib/Tools/glib2.py | 234 ----- .../waflib/Tools/gnu_dirs.py | 66 -- .../waflib/Tools/gxx.py | 102 --- .../waflib/Tools/icc.py | 22 - .../waflib/Tools/icpc.py | 22 - .../waflib/Tools/ifort.py | 48 -- .../waflib/Tools/intltool.py | 97 --- .../waflib/Tools/irixcc.py | 45 - .../waflib/Tools/javaw.py | 305 ------- .../waflib/Tools/kde4.py | 48 -- .../waflib/Tools/ldc2.py | 36 - .../waflib/Tools/lua.py | 18 - .../waflib/Tools/msvc.py | 809 ------------------ .../waflib/Tools/nasm.py | 16 - .../waflib/Tools/perl.py | 90 -- .../waflib/Tools/python.py | 399 --------- .../waflib/Tools/qt4.py | 442 ---------- .../waflib/Tools/qt5.py | 489 ----------- .../waflib/Tools/ruby.py | 103 --- .../waflib/Tools/suncc.py | 46 - .../waflib/Tools/suncxx.py | 46 - .../waflib/Tools/tex.py | 317 ------- .../waflib/Tools/vala.py | 211 ----- .../waflib/Tools/waf_unit_test.py | 106 --- .../waflib/Tools/winres.py | 85 -- .../waflib/Tools/xlc.py | 43 - .../waflib/Tools/xlcxx.py | 43 - .../waflib/Utils.py | 468 ---------- .../waflib/__init__.py | 4 - .../waflib/ansiterm.py | 238 ------ .../waflib/extras/__init__.py | 4 - .../waflib/extras/compat15.py | 301 ------- .../waflib/fixpy2.py | 53 -- 79 files changed, 13516 deletions(-) delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py delete mode 100644 
.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/__init__.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ar.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/asm.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/bison.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_aliases.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_config.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py delete mode 100644 
.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/dbus.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/dmd.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py 
delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/icc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/icpc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ifort.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/python.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py 
delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py delete mode 100644 .waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py deleted file mode 100644 index a696a46c..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Build.py +++ /dev/null @@ -1,759 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,sys,errno,re,shutil,stat -try: - import cPickle -except ImportError: - import pickle as cPickle -from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors -import waflib.Node -CACHE_DIR='c4che' -CACHE_SUFFIX='_cache.py' -INSTALL=1337 -UNINSTALL=-1337 -SAVED_ATTRS='root node_deps raw_deps task_sigs'.split() -CFG_FILES='cfg_files' -POST_AT_ONCE=0 -POST_LAZY=1 -POST_BOTH=2 -PROTOCOL=-1 -if sys.platform=='cli': - PROTOCOL=0 -class BuildContext(Context.Context): - '''executes the build''' - cmd='build' - variant='' - def __init__(self,**kw): - super(BuildContext,self).__init__(**kw) - self.is_install=0 - self.top_dir=kw.get('top_dir',Context.top_dir) - self.run_dir=kw.get('run_dir',Context.run_dir) - self.post_mode=POST_AT_ONCE - self.out_dir=kw.get('out_dir',Context.out_dir) - self.cache_dir=kw.get('cache_dir',None) - if not self.cache_dir: - self.cache_dir=os.path.join(self.out_dir,CACHE_DIR) - self.all_envs={} - self.task_sigs={} - self.node_deps={} - self.raw_deps={} - self.cache_dir_contents={} - self.task_gen_cache_names={} - self.launch_dir=Context.launch_dir - self.jobs=Options.options.jobs - self.targets=Options.options.targets - self.keep=Options.options.keep - self.progress_bar=Options.options.progress_bar - self.deps_man=Utils.defaultdict(list) - self.current_group=0 - self.groups=[] - self.group_names={} - def get_variant_dir(self): - if not self.variant: - return self.out_dir - return os.path.join(self.out_dir,self.variant) - variant_dir=property(get_variant_dir,None) - def __call__(self,*k,**kw): - kw['bld']=self - ret=TaskGen.task_gen(*k,**kw) - self.task_gen_cache_names={} - self.add_to_group(ret,group=kw.get('group',None)) - return ret - def rule(self,*k,**kw): - def f(rule): - ret=self(*k,**kw) - ret.rule=rule - return ret - return f - def __copy__(self): - raise Errors.WafError('build contexts are not supposed to be copied') - def install_files(self,*k,**kw): 
- pass - def install_as(self,*k,**kw): - pass - def symlink_as(self,*k,**kw): - pass - def load_envs(self): - node=self.root.find_node(self.cache_dir) - if not node: - raise Errors.WafError('The project was not configured: run "waf configure" first!') - lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True) - if not lst: - raise Errors.WafError('The cache directory is empty: reconfigure the project') - for x in lst: - name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/') - env=ConfigSet.ConfigSet(x.abspath()) - self.all_envs[name]=env - for f in env[CFG_FILES]: - newnode=self.root.find_resource(f) - try: - h=Utils.h_file(newnode.abspath()) - except(IOError,AttributeError): - Logs.error('cannot find %r'%f) - h=Utils.SIG_NIL - newnode.sig=h - def init_dirs(self): - if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)): - raise Errors.WafError('The project was not configured: run "waf configure" first!') - self.path=self.srcnode=self.root.find_dir(self.top_dir) - self.bldnode=self.root.make_node(self.variant_dir) - self.bldnode.mkdir() - def execute(self): - self.restore() - if not self.all_envs: - self.load_envs() - self.execute_build() - def execute_build(self): - Logs.info("Waf: Entering directory `%s'"%self.variant_dir) - self.recurse([self.run_dir]) - self.pre_build() - self.timer=Utils.Timer() - try: - self.compile() - finally: - if self.progress_bar==1 and sys.stderr.isatty(): - c=len(self.returned_tasks)or 1 - m=self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL) - Logs.info(m,extra={'stream':sys.stderr,'c1':Logs.colors.cursor_off,'c2':Logs.colors.cursor_on}) - Logs.info("Waf: Leaving directory `%s'"%self.variant_dir) - self.post_build() - def restore(self): - try: - env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py')) - except EnvironmentError: - pass - else: - if env['version']').ljust(cols) - msg=Logs.indicator%(left,bar,right) - return msg - def declare_chain(self,*k,**kw): - return 
TaskGen.declare_chain(*k,**kw) - def pre_build(self): - for m in getattr(self,'pre_funs',[]): - m(self) - def post_build(self): - for m in getattr(self,'post_funs',[]): - m(self) - def add_pre_fun(self,meth): - try: - self.pre_funs.append(meth) - except AttributeError: - self.pre_funs=[meth] - def add_post_fun(self,meth): - try: - self.post_funs.append(meth) - except AttributeError: - self.post_funs=[meth] - def get_group(self,x): - if not self.groups: - self.add_group() - if x is None: - return self.groups[self.current_group] - if x in self.group_names: - return self.group_names[x] - return self.groups[x] - def add_to_group(self,tgen,group=None): - assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase)) - tgen.bld=self - self.get_group(group).append(tgen) - def get_group_name(self,g): - if not isinstance(g,list): - g=self.groups[g] - for x in self.group_names: - if id(self.group_names[x])==id(g): - return x - return'' - def get_group_idx(self,tg): - se=id(tg) - for i in range(len(self.groups)): - for t in self.groups[i]: - if id(t)==se: - return i - return None - def add_group(self,name=None,move=True): - if name and name in self.group_names: - Logs.error('add_group: name %s already present'%name) - g=[] - self.group_names[name]=g - self.groups.append(g) - if move: - self.current_group=len(self.groups)-1 - def set_group(self,idx): - if isinstance(idx,str): - g=self.group_names[idx] - for i in range(len(self.groups)): - if id(g)==id(self.groups[i]): - self.current_group=i - break - else: - self.current_group=idx - def total(self): - total=0 - for group in self.groups: - for tg in group: - try: - total+=len(tg.tasks) - except AttributeError: - total+=1 - return total - def get_targets(self): - to_post=[] - min_grp=0 - for name in self.targets.split(','): - tg=self.get_tgen_by_name(name) - m=self.get_group_idx(tg) - if m>min_grp: - min_grp=m - to_post=[tg] - elif m==min_grp: - to_post.append(tg) - return(min_grp,to_post) - def 
get_all_task_gen(self): - lst=[] - for g in self.groups: - lst.extend(g) - return lst - def post_group(self): - if self.targets=='*': - for tg in self.groups[self.cur]: - try: - f=tg.post - except AttributeError: - pass - else: - f() - elif self.targets: - if self.cur259 and not tgt.startswith('\\\\?\\'): - tgt='\\\\?\\'+tgt - shutil.copy2(src,tgt) - os.chmod(tgt,kw.get('chmod',Utils.O644)) - def do_install(self,src,tgt,**kw): - d,_=os.path.split(tgt) - if not d: - raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt)) - Utils.check_dir(d) - srclbl=src.replace(self.srcnode.abspath()+os.sep,'') - if not Options.options.force: - try: - st1=os.stat(tgt) - st2=os.stat(src) - except OSError: - pass - else: - if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size: - if not self.progress_bar: - Logs.info('- install %s (from %s)'%(tgt,srclbl)) - return False - if not self.progress_bar: - Logs.info('+ install %s (from %s)'%(tgt,srclbl)) - try: - os.chmod(tgt,Utils.O644|stat.S_IMODE(os.stat(tgt).st_mode)) - except EnvironmentError: - pass - try: - os.remove(tgt) - except OSError: - pass - try: - self.copy_fun(src,tgt,**kw) - except IOError: - try: - os.stat(src) - except EnvironmentError: - Logs.error('File %r does not exist'%src) - raise Errors.WafError('Could not install the file %r'%tgt) - def do_link(self,src,tgt,**kw): - d,_=os.path.split(tgt) - Utils.check_dir(d) - link=False - if not os.path.islink(tgt): - link=True - elif os.readlink(tgt)!=src: - link=True - if link: - try:os.remove(tgt) - except OSError:pass - if not self.progress_bar: - Logs.info('+ symlink %s (to %s)'%(tgt,src)) - os.symlink(src,tgt) - else: - if not self.progress_bar: - Logs.info('- symlink %s (to %s)'%(tgt,src)) - def run_task_now(self,tsk,postpone): - tsk.post() - if not postpone: - if tsk.runnable_status()==Task.ASK_LATER: - raise self.WafError('cannot post the task %r'%tsk) - tsk.run() - tsk.hasrun=True - def 
install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True,task=None): - assert(dest) - tsk=inst(env=env or self.env) - tsk.bld=self - tsk.path=cwd or self.path - tsk.chmod=chmod - tsk.task=task - if isinstance(files,waflib.Node.Node): - tsk.source=[files] - else: - tsk.source=Utils.to_list(files) - tsk.dest=dest - tsk.exec_task=tsk.exec_install_files - tsk.relative_trick=relative_trick - if add:self.add_to_group(tsk) - self.run_task_now(tsk,postpone) - return tsk - def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True,task=None): - assert(dest) - tsk=inst(env=env or self.env) - tsk.bld=self - tsk.path=cwd or self.path - tsk.chmod=chmod - tsk.source=[srcfile] - tsk.task=task - tsk.dest=dest - tsk.exec_task=tsk.exec_install_as - if add:self.add_to_group(tsk) - self.run_task_now(tsk,postpone) - return tsk - def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False,task=None): - if Utils.is_win32: - return - assert(dest) - tsk=inst(env=env or self.env) - tsk.bld=self - tsk.dest=dest - tsk.path=cwd or self.path - tsk.source=[] - tsk.task=task - tsk.link=src - tsk.relative_trick=relative_trick - tsk.exec_task=tsk.exec_symlink_as - if add:self.add_to_group(tsk) - self.run_task_now(tsk,postpone) - return tsk -class UninstallContext(InstallContext): - '''removes the targets installed''' - cmd='uninstall' - def __init__(self,**kw): - super(UninstallContext,self).__init__(**kw) - self.is_install=UNINSTALL - def rm_empty_dirs(self,tgt): - while tgt: - tgt=os.path.dirname(tgt) - try: - os.rmdir(tgt) - except OSError: - break - def do_install(self,src,tgt,**kw): - if not self.progress_bar: - Logs.info('- remove %s'%tgt) - self.uninstall.append(tgt) - try: - os.remove(tgt) - except OSError as e: - if e.errno!=errno.ENOENT: - if not getattr(self,'uninstall_error',None): - self.uninstall_error=True - Logs.warn('build: some files could not be uninstalled (retry 
with -vv to list them)') - if Logs.verbose>1: - Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno)) - self.rm_empty_dirs(tgt) - def do_link(self,src,tgt,**kw): - try: - if not self.progress_bar: - Logs.info('- remove %s'%tgt) - os.remove(tgt) - except OSError: - pass - self.rm_empty_dirs(tgt) - def execute(self): - try: - def runnable_status(self): - return Task.SKIP_ME - setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status) - setattr(Task.Task,'runnable_status',runnable_status) - super(UninstallContext,self).execute() - finally: - setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back) -class CleanContext(BuildContext): - '''cleans the project''' - cmd='clean' - def execute(self): - self.restore() - if not self.all_envs: - self.load_envs() - self.recurse([self.run_dir]) - try: - self.clean() - finally: - self.store() - def clean(self): - Logs.debug('build: clean called') - if self.bldnode!=self.srcnode: - lst=[] - for e in self.all_envs.values(): - lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES]) - for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True): - if n in lst: - continue - n.delete() - self.root.children={} - for v in'node_deps task_sigs raw_deps'.split(): - setattr(self,v,{}) -class ListContext(BuildContext): - '''lists the targets to execute''' - cmd='list' - def execute(self): - self.restore() - if not self.all_envs: - self.load_envs() - self.recurse([self.run_dir]) - self.pre_build() - self.timer=Utils.Timer() - for g in self.groups: - for tg in g: - try: - f=tg.post - except AttributeError: - pass - else: - f() - try: - self.get_tgen_by_name('') - except Exception: - pass - lst=list(self.task_gen_cache_names.keys()) - lst.sort() - for k in lst: - Logs.pprint('GREEN',k) -class StepContext(BuildContext): - '''executes tasks in a step-by-step fashion, for debugging''' - cmd='step' - def __init__(self,**kw): - super(StepContext,self).__init__(**kw) - 
self.files=Options.options.files - def compile(self): - if not self.files: - Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"') - BuildContext.compile(self) - return - targets=None - if self.targets and self.targets!='*': - targets=self.targets.split(',') - for g in self.groups: - for tg in g: - if targets and tg.name not in targets: - continue - try: - f=tg.post - except AttributeError: - pass - else: - f() - for pat in self.files.split(','): - matcher=self.get_matcher(pat) - for tg in g: - if isinstance(tg,Task.TaskBase): - lst=[tg] - else: - lst=tg.tasks - for tsk in lst: - do_exec=False - for node in getattr(tsk,'inputs',[]): - if matcher(node,output=False): - do_exec=True - break - for node in getattr(tsk,'outputs',[]): - if matcher(node,output=True): - do_exec=True - break - if do_exec: - ret=tsk.run() - Logs.info('%s -> exit %r'%(str(tsk),ret)) - def get_matcher(self,pat): - inn=True - out=True - if pat.startswith('in:'): - out=False - pat=pat.replace('in:','') - elif pat.startswith('out:'): - inn=False - pat=pat.replace('out:','') - anode=self.root.find_node(pat) - pattern=None - if not anode: - if not pat.startswith('^'): - pat='^.+?%s'%pat - if not pat.endswith('$'): - pat='%s$'%pat - pattern=re.compile(pat) - def match(node,output): - if output==True and not out: - return False - if output==False and not inn: - return False - if anode: - return anode==node - else: - return pattern.match(node.abspath()) - return match diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py deleted file mode 100644 index 3a86efec..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ConfigSet.py +++ /dev/null @@ -1,153 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import copy,re,os -from waflib import Logs,Utils -re_imp=re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$',re.M) -class ConfigSet(object): - __slots__=('table','parent') - def __init__(self,filename=None): - self.table={} - if filename: - self.load(filename) - def __contains__(self,key): - if key in self.table:return True - try:return self.parent.__contains__(key) - except AttributeError:return False - def keys(self): - keys=set() - cur=self - while cur: - keys.update(cur.table.keys()) - cur=getattr(cur,'parent',None) - keys=list(keys) - keys.sort() - return keys - def __str__(self): - return"\n".join(["%r %r"%(x,self.__getitem__(x))for x in self.keys()]) - def __getitem__(self,key): - try: - while 1: - x=self.table.get(key,None) - if not x is None: - return x - self=self.parent - except AttributeError: - return[] - def __setitem__(self,key,value): - self.table[key]=value - def __delitem__(self,key): - self[key]=[] - def __getattr__(self,name): - if name in self.__slots__: - return object.__getattr__(self,name) - else: - return self[name] - def __setattr__(self,name,value): - if name in self.__slots__: - object.__setattr__(self,name,value) - else: - self[name]=value - def __delattr__(self,name): - if name in self.__slots__: - object.__delattr__(self,name) - else: - del self[name] - def derive(self): - newenv=ConfigSet() - newenv.parent=self - return newenv - def detach(self): - tbl=self.get_merged_dict() - try: - delattr(self,'parent') - except AttributeError: - pass - else: - keys=tbl.keys() - for x in keys: - tbl[x]=copy.deepcopy(tbl[x]) - self.table=tbl - return self - def get_flat(self,key): - s=self[key] - if isinstance(s,str):return s - return' '.join(s) - def _get_list_value_for_modification(self,key): - try: - value=self.table[key] - except KeyError: - try:value=self.parent[key] - except AttributeError:value=[] - if isinstance(value,list): - value=value[:] - else: - value=[value] - else: - if not 
isinstance(value,list): - value=[value] - self.table[key]=value - return value - def append_value(self,var,val): - if isinstance(val,str): - val=[val] - current_value=self._get_list_value_for_modification(var) - current_value.extend(val) - def prepend_value(self,var,val): - if isinstance(val,str): - val=[val] - self.table[var]=val+self._get_list_value_for_modification(var) - def append_unique(self,var,val): - if isinstance(val,str): - val=[val] - current_value=self._get_list_value_for_modification(var) - for x in val: - if x not in current_value: - current_value.append(x) - def get_merged_dict(self): - table_list=[] - env=self - while 1: - table_list.insert(0,env.table) - try:env=env.parent - except AttributeError:break - merged_table={} - for table in table_list: - merged_table.update(table) - return merged_table - def store(self,filename): - try: - os.makedirs(os.path.split(filename)[0]) - except OSError: - pass - buf=[] - merged_table=self.get_merged_dict() - keys=list(merged_table.keys()) - keys.sort() - try: - fun=ascii - except NameError: - fun=repr - for k in keys: - if k!='undo_stack': - buf.append('%s = %s\n'%(k,fun(merged_table[k]))) - Utils.writef(filename,''.join(buf)) - def load(self,filename): - tbl=self.table - code=Utils.readf(filename,m='rU') - for m in re_imp.finditer(code): - g=m.group - tbl[g(2)]=eval(g(3)) - Logs.debug('env: %s'%str(self.table)) - def update(self,d): - for k,v in d.items(): - self[k]=v - def stash(self): - orig=self.table - tbl=self.table=self.table.copy() - for x in tbl.keys(): - tbl[x]=copy.deepcopy(tbl[x]) - self.undo_stack=self.undo_stack+[orig] - def revert(self): - self.table=self.undo_stack.pop(-1) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py deleted file mode 100644 index 41d77206..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Configure.py +++ /dev/null @@ -1,379 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,shlex,sys,time,re,shutil -from waflib import ConfigSet,Utils,Options,Logs,Context,Build,Errors -BREAK='break' -CONTINUE='continue' -WAF_CONFIG_LOG='config.log' -autoconfig=False -conf_template='''# project %(app)s configured on %(now)s by -# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s) -# using %(args)s -#''' -class ConfigurationContext(Context.Context): - '''configures the project''' - cmd='configure' - error_handlers=[] - def __init__(self,**kw): - super(ConfigurationContext,self).__init__(**kw) - self.environ=dict(os.environ) - self.all_envs={} - self.top_dir=None - self.out_dir=None - self.tools=[] - self.hash=0 - self.files=[] - self.tool_cache=[] - self.setenv('') - def setenv(self,name,env=None): - if name not in self.all_envs or env: - if not env: - env=ConfigSet.ConfigSet() - self.prepare_env(env) - else: - env=env.derive() - self.all_envs[name]=env - self.variant=name - def get_env(self): - return self.all_envs[self.variant] - def set_env(self,val): - self.all_envs[self.variant]=val - env=property(get_env,set_env) - def init_dirs(self): - top=self.top_dir - if not top: - top=Options.options.top - if not top: - top=getattr(Context.g_module,Context.TOP,None) - if not top: - top=self.path.abspath() - top=os.path.abspath(top) - self.srcnode=(os.path.isabs(top)and self.root or self.path).find_dir(top) - assert(self.srcnode) - out=self.out_dir - if not out: - out=Options.options.out - if not out: - out=getattr(Context.g_module,Context.OUT,None) - if not out: - out=Options.lockfile.replace('.lock-waf_%s_'%sys.platform,'').replace('.lock-waf','') - out=os.path.realpath(out) - self.bldnode=(os.path.isabs(out)and self.root or self.path).make_node(out) - self.bldnode.mkdir() - if not os.path.isdir(self.bldnode.abspath()): - conf.fatal('Could not create the build directory %s'%self.bldnode.abspath()) - def execute(self): 
- self.init_dirs() - self.cachedir=self.bldnode.make_node(Build.CACHE_DIR) - self.cachedir.mkdir() - path=os.path.join(self.bldnode.abspath(),WAF_CONFIG_LOG) - self.logger=Logs.make_logger(path,'cfg') - app=getattr(Context.g_module,'APPNAME','') - if app: - ver=getattr(Context.g_module,'VERSION','') - if ver: - app="%s (%s)"%(app,ver) - params={'now':time.ctime(),'pyver':sys.hexversion,'systype':sys.platform,'args':" ".join(sys.argv),'wafver':Context.WAFVERSION,'abi':Context.ABI,'app':app} - self.to_log(conf_template%params) - self.msg('Setting top to',self.srcnode.abspath()) - self.msg('Setting out to',self.bldnode.abspath()) - if id(self.srcnode)==id(self.bldnode): - Logs.warn('Setting top == out (remember to use "update_outputs")') - elif id(self.path)!=id(self.srcnode): - if self.srcnode.is_child_of(self.path): - Logs.warn('Are you certain that you do not want to set top="." ?') - super(ConfigurationContext,self).execute() - self.store() - Context.top_dir=self.srcnode.abspath() - Context.out_dir=self.bldnode.abspath() - env=ConfigSet.ConfigSet() - env['argv']=sys.argv - env['options']=Options.options.__dict__ - env.run_dir=Context.run_dir - env.top_dir=Context.top_dir - env.out_dir=Context.out_dir - env['hash']=self.hash - env['files']=self.files - env['environ']=dict(self.environ) - if not self.env.NO_LOCK_IN_RUN and not getattr(Options.options,'no_lock_in_run'): - env.store(os.path.join(Context.run_dir,Options.lockfile)) - if not self.env.NO_LOCK_IN_TOP and not getattr(Options.options,'no_lock_in_top'): - env.store(os.path.join(Context.top_dir,Options.lockfile)) - if not self.env.NO_LOCK_IN_OUT and not getattr(Options.options,'no_lock_in_out'): - env.store(os.path.join(Context.out_dir,Options.lockfile)) - def prepare_env(self,env): - if not env.PREFIX: - if Options.options.prefix or Utils.is_win32: - env.PREFIX=Utils.sane_path(Options.options.prefix) - else: - env.PREFIX='' - if not env.BINDIR: - if Options.options.bindir: - 
env.BINDIR=Utils.sane_path(Options.options.bindir) - else: - env.BINDIR=Utils.subst_vars('${PREFIX}/bin',env) - if not env.LIBDIR: - if Options.options.libdir: - env.LIBDIR=Utils.sane_path(Options.options.libdir) - else: - env.LIBDIR=Utils.subst_vars('${PREFIX}/lib%s'%Utils.lib64(),env) - def store(self): - n=self.cachedir.make_node('build.config.py') - n.write('version = 0x%x\ntools = %r\n'%(Context.HEXVERSION,self.tools)) - if not self.all_envs: - self.fatal('nothing to store in the configuration context!') - for key in self.all_envs: - tmpenv=self.all_envs[key] - tmpenv.store(os.path.join(self.cachedir.abspath(),key+Build.CACHE_SUFFIX)) - def load(self,input,tooldir=None,funs=None,with_sys_path=True): - tools=Utils.to_list(input) - if tooldir:tooldir=Utils.to_list(tooldir) - for tool in tools: - mag=(tool,id(self.env),tooldir,funs) - if mag in self.tool_cache: - self.to_log('(tool %s is already loaded, skipping)'%tool) - continue - self.tool_cache.append(mag) - module=None - try: - module=Context.load_tool(tool,tooldir,ctx=self,with_sys_path=with_sys_path) - except ImportError as e: - self.fatal('Could not load the Waf tool %r from %r\n%s'%(tool,sys.path,e)) - except Exception as e: - self.to_log('imp %r (%r & %r)'%(tool,tooldir,funs)) - self.to_log(Utils.ex_stack()) - raise - if funs is not None: - self.eval_rules(funs) - else: - func=getattr(module,'configure',None) - if func: - if type(func)is type(Utils.readf):func(self) - else:self.eval_rules(func) - self.tools.append({'tool':tool,'tooldir':tooldir,'funs':funs}) - def post_recurse(self,node): - super(ConfigurationContext,self).post_recurse(node) - self.hash=Utils.h_list((self.hash,node.read('rb'))) - self.files.append(node.abspath()) - def eval_rules(self,rules): - self.rules=Utils.to_list(rules) - for x in self.rules: - f=getattr(self,x) - if not f:self.fatal("No such method '%s'."%x) - try: - f() - except Exception as e: - ret=self.err_handler(x,e) - if ret==BREAK: - break - elif ret==CONTINUE: - continue 
- else: - raise - def err_handler(self,fun,error): - pass -def conf(f): - def fun(*k,**kw): - mandatory=True - if'mandatory'in kw: - mandatory=kw['mandatory'] - del kw['mandatory'] - try: - return f(*k,**kw) - except Errors.ConfigurationError: - if mandatory: - raise - fun.__name__=f.__name__ - setattr(ConfigurationContext,f.__name__,fun) - setattr(Build.BuildContext,f.__name__,fun) - return f -@conf -def add_os_flags(self,var,dest=None,dup=True): - try: - flags=shlex.split(self.environ[var]) - except KeyError: - return - if dup or''.join(flags)not in''.join(Utils.to_list(self.env[dest or var])): - self.env.append_value(dest or var,flags) -@conf -def cmd_to_list(self,cmd): - if isinstance(cmd,str)and cmd.find(' '): - try: - os.stat(cmd) - except OSError: - return shlex.split(cmd) - else: - return[cmd] - return cmd -@conf -def check_waf_version(self,mini='1.7.99',maxi='1.9.0',**kw): - self.start_msg('Checking for waf version in %s-%s'%(str(mini),str(maxi)),**kw) - ver=Context.HEXVERSION - if Utils.num2ver(mini)>ver: - self.fatal('waf version should be at least %r (%r found)'%(Utils.num2ver(mini),ver)) - if Utils.num2ver(maxi) %r'%(filename,path_list,var,ret)) - if not ret: - self.fatal(kw.get('errmsg','')or'Could not find the program %r'%filename) - interpreter=kw.get('interpreter',None) - if interpreter is None: - if not Utils.check_exe(ret[0],env=environ): - self.fatal('Program %r is not executable'%ret) - self.env[var]=ret - else: - self.env[var]=self.env[interpreter]+ret - return ret -@conf -def find_binary(self,filenames,exts,paths): - for f in filenames: - for ext in exts: - exe_name=f+ext - if os.path.isabs(exe_name): - if os.path.isfile(exe_name): - return exe_name - else: - for path in paths: - x=os.path.expanduser(os.path.join(path,exe_name)) - if os.path.isfile(x): - return x - return None -@conf -def run_build(self,*k,**kw): - lst=[str(v)for(p,v)in kw.items()if p!='env'] - h=Utils.h_list(lst) - dir=self.bldnode.abspath()+os.sep+(not Utils.is_win32 
and'.'or'')+'conf_check_'+Utils.to_hex(h) - try: - os.makedirs(dir) - except OSError: - pass - try: - os.stat(dir) - except OSError: - self.fatal('cannot use the configuration test folder %r'%dir) - cachemode=getattr(Options.options,'confcache',None) - if cachemode==1: - try: - proj=ConfigSet.ConfigSet(os.path.join(dir,'cache_run_build')) - except OSError: - pass - except IOError: - pass - else: - ret=proj['cache_run_build'] - if isinstance(ret,str)and ret.startswith('Test does not build'): - self.fatal(ret) - return ret - bdir=os.path.join(dir,'testbuild') - if not os.path.exists(bdir): - os.makedirs(bdir) - self.test_bld=bld=Build.BuildContext(top_dir=dir,out_dir=bdir) - bld.init_dirs() - bld.progress_bar=0 - bld.targets='*' - bld.logger=self.logger - bld.all_envs.update(self.all_envs) - bld.env=kw['env'] - bld.kw=kw - bld.conf=self - kw['build_fun'](bld) - ret=-1 - try: - try: - bld.compile() - except Errors.WafError: - ret='Test does not build: %s'%Utils.ex_stack() - self.fatal(ret) - else: - ret=getattr(bld,'retval',0) - finally: - if cachemode==1: - proj=ConfigSet.ConfigSet() - proj['cache_run_build']=ret - proj.store(os.path.join(dir,'cache_run_build')) - else: - shutil.rmtree(dir) - return ret -@conf -def ret_msg(self,msg,args): - if isinstance(msg,str): - return msg - return msg(args) -@conf -def test(self,*k,**kw): - if not'env'in kw: - kw['env']=self.env.derive() - if kw.get('validate',None): - kw['validate'](kw) - self.start_msg(kw['msg'],**kw) - ret=None - try: - ret=self.run_build(*k,**kw) - except self.errors.ConfigurationError: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - if Logs.verbose>1: - raise - else: - self.fatal('The configuration failed') - else: - kw['success']=ret - if kw.get('post_check',None): - ret=kw['post_check'](kw) - if ret: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - self.fatal('The configuration failed %r'%ret) - else: - self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) - return ret diff --git 
a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py deleted file mode 100644 index 89cead61..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Context.py +++ /dev/null @@ -1,394 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re,imp,sys -from waflib import Utils,Errors,Logs -import waflib.Node -HEXVERSION=0x1081200 -WAFVERSION="1.8.18" -WAFREVISION="62efd566280f494cdf4d7940aac85f2b59ef660b" -ABI=98 -DBFILE='.wafpickle-%s-%d-%d'%(sys.platform,sys.hexversion,ABI) -APPNAME='APPNAME' -VERSION='VERSION' -TOP='top' -OUT='out' -WSCRIPT_FILE='wscript' -launch_dir='' -run_dir='' -top_dir='' -out_dir='' -waf_dir='' -local_repo='' -remote_repo='https://raw.githubusercontent.com/waf-project/waf/master/' -remote_locs=['waflib/extras','waflib/Tools'] -g_module=None -STDOUT=1 -STDERR=-1 -BOTH=0 -classes=[] -def create_context(cmd_name,*k,**kw): - global classes - for x in classes: - if x.cmd==cmd_name: - return x(*k,**kw) - ctx=Context(*k,**kw) - ctx.fun=cmd_name - return ctx -class store_context(type): - def __init__(cls,name,bases,dict): - super(store_context,cls).__init__(name,bases,dict) - name=cls.__name__ - if name=='ctx'or name=='Context': - return - try: - cls.cmd - except AttributeError: - raise Errors.WafError('Missing command for the context class %r (cmd)'%name) - if not getattr(cls,'fun',None): - cls.fun=cls.cmd - global classes - classes.insert(0,cls) -ctx=store_context('ctx',(object,),{}) -class Context(ctx): - errors=Errors - tools={} - def __init__(self,**kw): - try: - rd=kw['run_dir'] - except KeyError: - global run_dir - rd=run_dir - self.node_class=type("Nod3",(waflib.Node.Node,),{}) - self.node_class.__module__="waflib.Node" - self.node_class.ctx=self - self.root=self.node_class('',None) - self.cur_script=None - self.path=self.root.find_dir(rd) - self.stack_path=[] - 
self.exec_dict={'ctx':self,'conf':self,'bld':self,'opt':self} - self.logger=None - def __hash__(self): - return id(self) - def finalize(self): - try: - logger=self.logger - except AttributeError: - pass - else: - Logs.free_logger(logger) - delattr(self,'logger') - def load(self,tool_list,*k,**kw): - tools=Utils.to_list(tool_list) - path=Utils.to_list(kw.get('tooldir','')) - with_sys_path=kw.get('with_sys_path',True) - for t in tools: - module=load_tool(t,path,with_sys_path=with_sys_path) - fun=getattr(module,kw.get('name',self.fun),None) - if fun: - fun(self) - def execute(self): - global g_module - self.recurse([os.path.dirname(g_module.root_path)]) - def pre_recurse(self,node): - self.stack_path.append(self.cur_script) - self.cur_script=node - self.path=node.parent - def post_recurse(self,node): - self.cur_script=self.stack_path.pop() - if self.cur_script: - self.path=self.cur_script.parent - def recurse(self,dirs,name=None,mandatory=True,once=True,encoding=None): - try: - cache=self.recurse_cache - except AttributeError: - cache=self.recurse_cache={} - for d in Utils.to_list(dirs): - if not os.path.isabs(d): - d=os.path.join(self.path.abspath(),d) - WSCRIPT=os.path.join(d,WSCRIPT_FILE) - WSCRIPT_FUN=WSCRIPT+'_'+(name or self.fun) - node=self.root.find_node(WSCRIPT_FUN) - if node and(not once or node not in cache): - cache[node]=True - self.pre_recurse(node) - try: - function_code=node.read('rU',encoding) - exec(compile(function_code,node.abspath(),'exec'),self.exec_dict) - finally: - self.post_recurse(node) - elif not node: - node=self.root.find_node(WSCRIPT) - tup=(node,name or self.fun) - if node and(not once or tup not in cache): - cache[tup]=True - self.pre_recurse(node) - try: - wscript_module=load_module(node.abspath(),encoding=encoding) - user_function=getattr(wscript_module,(name or self.fun),None) - if not user_function: - if not mandatory: - continue - raise Errors.WafError('No function %s defined in %s'%(name or self.fun,node.abspath())) - 
user_function(self) - finally: - self.post_recurse(node) - elif not node: - if not mandatory: - continue - try: - os.listdir(d) - except OSError: - raise Errors.WafError('Cannot read the folder %r'%d) - raise Errors.WafError('No wscript file in directory %s'%d) - def exec_command(self,cmd,**kw): - subprocess=Utils.subprocess - kw['shell']=isinstance(cmd,str) - Logs.debug('runner: %r'%(cmd,)) - Logs.debug('runner_env: kw=%s'%kw) - if self.logger: - self.logger.info(cmd) - if'stdout'not in kw: - kw['stdout']=subprocess.PIPE - if'stderr'not in kw: - kw['stderr']=subprocess.PIPE - if Logs.verbose and not kw['shell']and not Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!"%cmd[0]) - wargs={} - if'timeout'in kw: - if kw['timeout']is not None: - wargs['timeout']=kw['timeout'] - del kw['timeout'] - if'input'in kw: - if kw['input']: - wargs['input']=kw['input'] - kw['stdin']=Utils.subprocess.PIPE - del kw['input'] - try: - if kw['stdout']or kw['stderr']: - p=subprocess.Popen(cmd,**kw) - (out,err)=p.communicate(**wargs) - ret=p.returncode - else: - out,err=(None,None) - ret=subprocess.Popen(cmd,**kw).wait(**wargs) - except Exception as e: - raise Errors.WafError('Execution failure: %s'%str(e),ex=e) - if out: - if not isinstance(out,str): - out=out.decode(sys.stdout.encoding or'iso8859-1') - if self.logger: - self.logger.debug('out: %s'%out) - else: - Logs.info(out,extra={'stream':sys.stdout,'c1':''}) - if err: - if not isinstance(err,str): - err=err.decode(sys.stdout.encoding or'iso8859-1') - if self.logger: - self.logger.error('err: %s'%err) - else: - Logs.info(err,extra={'stream':sys.stderr,'c1':''}) - return ret - def cmd_and_log(self,cmd,**kw): - subprocess=Utils.subprocess - kw['shell']=isinstance(cmd,str) - Logs.debug('runner: %r'%(cmd,)) - if'quiet'in kw: - quiet=kw['quiet'] - del kw['quiet'] - else: - quiet=None - if'output'in kw: - to_ret=kw['output'] - del kw['output'] - else: - to_ret=STDOUT - if Logs.verbose and not kw['shell']and not 
Utils.check_exe(cmd[0]): - raise Errors.WafError("Program %s not found!"%cmd[0]) - kw['stdout']=kw['stderr']=subprocess.PIPE - if quiet is None: - self.to_log(cmd) - wargs={} - if'timeout'in kw: - if kw['timeout']is not None: - wargs['timeout']=kw['timeout'] - del kw['timeout'] - if'input'in kw: - if kw['input']: - wargs['input']=kw['input'] - kw['stdin']=Utils.subprocess.PIPE - del kw['input'] - try: - p=subprocess.Popen(cmd,**kw) - (out,err)=p.communicate(**wargs) - except Exception as e: - raise Errors.WafError('Execution failure: %s'%str(e),ex=e) - if not isinstance(out,str): - out=out.decode(sys.stdout.encoding or'iso8859-1') - if not isinstance(err,str): - err=err.decode(sys.stdout.encoding or'iso8859-1') - if out and quiet!=STDOUT and quiet!=BOTH: - self.to_log('out: %s'%out) - if err and quiet!=STDERR and quiet!=BOTH: - self.to_log('err: %s'%err) - if p.returncode: - e=Errors.WafError('Command %r returned %r'%(cmd,p.returncode)) - e.returncode=p.returncode - e.stderr=err - e.stdout=out - raise e - if to_ret==BOTH: - return(out,err) - elif to_ret==STDERR: - return err - return out - def fatal(self,msg,ex=None): - if self.logger: - self.logger.info('from %s: %s'%(self.path.abspath(),msg)) - try: - msg='%s\n(complete log in %s)'%(msg,self.logger.handlers[0].baseFilename) - except Exception: - pass - raise self.errors.ConfigurationError(msg,ex=ex) - def to_log(self,msg): - if not msg: - return - if self.logger: - self.logger.info(msg) - else: - sys.stderr.write(str(msg)) - sys.stderr.flush() - def msg(self,*k,**kw): - try: - msg=kw['msg'] - except KeyError: - msg=k[0] - self.start_msg(msg,**kw) - try: - result=kw['result'] - except KeyError: - result=k[1] - color=kw.get('color',None) - if not isinstance(color,str): - color=result and'GREEN'or'YELLOW' - self.end_msg(result,color,**kw) - def start_msg(self,*k,**kw): - if kw.get('quiet',None): - return - msg=kw.get('msg',None)or k[0] - try: - if self.in_msg: - self.in_msg+=1 - return - except AttributeError: - 
self.in_msg=0 - self.in_msg+=1 - try: - self.line_just=max(self.line_just,len(msg)) - except AttributeError: - self.line_just=max(40,len(msg)) - for x in(self.line_just*'-',msg): - self.to_log(x) - Logs.pprint('NORMAL',"%s :"%msg.ljust(self.line_just),sep='') - def end_msg(self,*k,**kw): - if kw.get('quiet',None): - return - self.in_msg-=1 - if self.in_msg: - return - result=kw.get('result',None)or k[0] - defcolor='GREEN' - if result==True: - msg='ok' - elif result==False: - msg='not found' - defcolor='YELLOW' - else: - msg=str(result) - self.to_log(msg) - try: - color=kw['color'] - except KeyError: - if len(k)>1 and k[1]in Logs.colors_lst: - color=k[1] - else: - color=defcolor - Logs.pprint(color,msg) - def load_special_tools(self,var,ban=[]): - global waf_dir - if os.path.isdir(waf_dir): - lst=self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var) - for x in lst: - if not x.name in ban: - load_tool(x.name.replace('.py','')) - else: - from zipfile import PyZipFile - waflibs=PyZipFile(waf_dir) - lst=waflibs.namelist() - for x in lst: - if not re.match("waflib/extras/%s"%var.replace("*",".*"),var): - continue - f=os.path.basename(x) - doban=False - for b in ban: - r=b.replace("*",".*") - if re.match(r,f): - doban=True - if not doban: - f=f.replace('.py','') - load_tool(f) -cache_modules={} -def load_module(path,encoding=None): - try: - return cache_modules[path] - except KeyError: - pass - module=imp.new_module(WSCRIPT_FILE) - try: - code=Utils.readf(path,m='rU',encoding=encoding) - except EnvironmentError: - raise Errors.WafError('Could not read the file %r'%path) - module_dir=os.path.dirname(path) - sys.path.insert(0,module_dir) - try:exec(compile(code,path,'exec'),module.__dict__) - finally:sys.path.remove(module_dir) - cache_modules[path]=module - return module -def load_tool(tool,tooldir=None,ctx=None,with_sys_path=True): - if tool=='java': - tool='javaw' - else: - tool=tool.replace('++','xx') - origSysPath=sys.path - if not 
with_sys_path:sys.path=[] - try: - if tooldir: - assert isinstance(tooldir,list) - sys.path=tooldir+sys.path - try: - __import__(tool) - finally: - for d in tooldir: - sys.path.remove(d) - ret=sys.modules[tool] - Context.tools[tool]=ret - return ret - else: - if not with_sys_path:sys.path.insert(0,waf_dir) - try: - for x in('waflib.Tools.%s','waflib.extras.%s','waflib.%s','%s'): - try: - __import__(x%tool) - break - except ImportError: - x=None - if x is None: - __import__(tool) - finally: - if not with_sys_path:sys.path.remove(waf_dir) - ret=sys.modules[x%tool] - Context.tools[tool]=ret - return ret - finally: - if not with_sys_path:sys.path+=origSysPath diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py deleted file mode 100644 index 3d98c8d2..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Errors.py +++ /dev/null @@ -1,37 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import traceback,sys -class WafError(Exception): - def __init__(self,msg='',ex=None): - self.msg=msg - assert not isinstance(msg,Exception) - self.stack=[] - if ex: - if not msg: - self.msg=str(ex) - if isinstance(ex,WafError): - self.stack=ex.stack - else: - self.stack=traceback.extract_tb(sys.exc_info()[2]) - self.stack+=traceback.extract_stack()[:-1] - self.verbose_msg=''.join(traceback.format_list(self.stack)) - def __str__(self): - return str(self.msg) -class BuildError(WafError): - def __init__(self,error_tasks=[]): - self.tasks=error_tasks - WafError.__init__(self,self.format_error()) - def format_error(self): - lst=['Build failed'] - for tsk in self.tasks: - txt=tsk.format_error() - if txt:lst.append(txt) - return'\n'.join(lst) -class ConfigurationError(WafError): - pass -class TaskRescan(WafError): - pass -class TaskNotReady(WafError): - pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py deleted file mode 100644 index 984ac1bd..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Logs.py +++ /dev/null @@ -1,200 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re,traceback,sys -from waflib import Utils,ansiterm -if not os.environ.get('NOSYNC',False): - if sys.stdout.isatty()and id(sys.stdout)==id(sys.__stdout__): - sys.stdout=ansiterm.AnsiTerm(sys.stdout) - if sys.stderr.isatty()and id(sys.stderr)==id(sys.__stderr__): - sys.stderr=ansiterm.AnsiTerm(sys.stderr) -import logging -LOG_FORMAT=os.environ.get('WAF_LOG_FORMAT','%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s') -HOUR_FORMAT=os.environ.get('WAF_HOUR_FORMAT','%H:%M:%S') -zones='' -verbose=0 -colors_lst={'USE':True,'BOLD':'\x1b[01;1m','RED':'\x1b[01;31m','GREEN':'\x1b[32m','YELLOW':'\x1b[33m','PINK':'\x1b[35m','BLUE':'\x1b[01;34m','CYAN':'\x1b[36m','GREY':'\x1b[37m','NORMAL':'\x1b[0m','cursor_on':'\x1b[?25h','cursor_off':'\x1b[?25l',} -indicator='\r\x1b[K%s%s%s' -try: - unicode -except NameError: - unicode=None -def enable_colors(use): - if use==1: - if not(sys.stderr.isatty()or sys.stdout.isatty()): - use=0 - if Utils.is_win32 and os.name!='java': - term=os.environ.get('TERM','') - else: - term=os.environ.get('TERM','dumb') - if term in('dumb','emacs'): - use=0 - if use>=1: - os.environ['TERM']='vt100' - colors_lst['USE']=use -try: - get_term_cols=ansiterm.get_term_cols -except AttributeError: - def get_term_cols(): - return 80 -get_term_cols.__doc__=""" - Get the console width in characters. 
- - :return: the number of characters per line - :rtype: int - """ -def get_color(cl): - if not colors_lst['USE']:return'' - return colors_lst.get(cl,'') -class color_dict(object): - def __getattr__(self,a): - return get_color(a) - def __call__(self,a): - return get_color(a) -colors=color_dict() -re_log=re.compile(r'(\w+): (.*)',re.M) -class log_filter(logging.Filter): - def __init__(self,name=None): - pass - def filter(self,rec): - rec.zone=rec.module - if rec.levelno>=logging.INFO: - return True - m=re_log.match(rec.msg) - if m: - rec.zone=m.group(1) - rec.msg=m.group(2) - if zones: - return getattr(rec,'zone','')in zones or'*'in zones - elif not verbose>2: - return False - return True -class log_handler(logging.StreamHandler): - def emit(self,record): - try: - try: - self.stream=record.stream - except AttributeError: - if record.levelno>=logging.WARNING: - record.stream=self.stream=sys.stderr - else: - record.stream=self.stream=sys.stdout - self.emit_override(record) - self.flush() - except(KeyboardInterrupt,SystemExit): - raise - except: - self.handleError(record) - def emit_override(self,record,**kw): - self.terminator=getattr(record,'terminator','\n') - stream=self.stream - if unicode: - msg=self.formatter.format(record) - fs='%s'+self.terminator - try: - if(isinstance(msg,unicode)and getattr(stream,'encoding',None)): - fs=fs.decode(stream.encoding) - try: - stream.write(fs%msg) - except UnicodeEncodeError: - stream.write((fs%msg).encode(stream.encoding)) - else: - stream.write(fs%msg) - except UnicodeError: - stream.write((fs%msg).encode("UTF-8")) - else: - logging.StreamHandler.emit(self,record) -class formatter(logging.Formatter): - def __init__(self): - logging.Formatter.__init__(self,LOG_FORMAT,HOUR_FORMAT) - def format(self,rec): - try: - msg=rec.msg.decode('utf-8') - except Exception: - msg=rec.msg - use=colors_lst['USE'] - if(use==1 and rec.stream.isatty())or use==2: - c1=getattr(rec,'c1',None) - if c1 is None: - c1='' - if rec.levelno>=logging.ERROR: 
- c1=colors.RED - elif rec.levelno>=logging.WARNING: - c1=colors.YELLOW - elif rec.levelno>=logging.INFO: - c1=colors.GREEN - c2=getattr(rec,'c2',colors.NORMAL) - msg='%s%s%s'%(c1,msg,c2) - else: - msg=msg.replace('\r','\n') - msg=re.sub(r'\x1B\[(K|.*?(m|h|l))','',msg) - if rec.levelno>=logging.INFO: - return msg - rec.msg=msg - rec.c1=colors.PINK - rec.c2=colors.NORMAL - return logging.Formatter.format(self,rec) -log=None -def debug(*k,**kw): - if verbose: - k=list(k) - k[0]=k[0].replace('\n',' ') - global log - log.debug(*k,**kw) -def error(*k,**kw): - global log - log.error(*k,**kw) - if verbose>2: - st=traceback.extract_stack() - if st: - st=st[:-1] - buf=[] - for filename,lineno,name,line in st: - buf.append(' File "%s", line %d, in %s'%(filename,lineno,name)) - if line: - buf.append(' %s'%line.strip()) - if buf:log.error("\n".join(buf)) -def warn(*k,**kw): - global log - log.warn(*k,**kw) -def info(*k,**kw): - global log - log.info(*k,**kw) -def init_log(): - global log - log=logging.getLogger('waflib') - log.handlers=[] - log.filters=[] - hdlr=log_handler() - hdlr.setFormatter(formatter()) - log.addHandler(hdlr) - log.addFilter(log_filter()) - log.setLevel(logging.DEBUG) -def make_logger(path,name): - logger=logging.getLogger(name) - hdlr=logging.FileHandler(path,'w') - formatter=logging.Formatter('%(message)s') - hdlr.setFormatter(formatter) - logger.addHandler(hdlr) - logger.setLevel(logging.DEBUG) - return logger -def make_mem_logger(name,to_log,size=8192): - from logging.handlers import MemoryHandler - logger=logging.getLogger(name) - hdlr=MemoryHandler(size,target=to_log) - formatter=logging.Formatter('%(message)s') - hdlr.setFormatter(formatter) - logger.addHandler(hdlr) - logger.memhandler=hdlr - logger.setLevel(logging.DEBUG) - return logger -def free_logger(logger): - try: - for x in logger.handlers: - x.close() - logger.removeHandler(x) - except Exception: - pass -def pprint(col,msg,label='',sep='\n'): - info("%s%s%s 
%s"%(colors(col),msg,colors.NORMAL,label),extra={'terminator':sep}) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py deleted file mode 100644 index f7eb365e..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Node.py +++ /dev/null @@ -1,491 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re,sys,shutil -from waflib import Utils,Errors -exclude_regs=''' -**/*~ -**/#*# -**/.#* -**/%*% -**/._* -**/CVS -**/CVS/** -**/.cvsignore -**/SCCS -**/SCCS/** -**/vssver.scc -**/.svn -**/.svn/** -**/BitKeeper -**/.git -**/.git/** -**/.gitignore -**/.bzr -**/.bzrignore -**/.bzr/** -**/.hg -**/.hg/** -**/_MTN -**/_MTN/** -**/.arch-ids -**/{arch} -**/_darcs -**/_darcs/** -**/.intlcache -**/.DS_Store''' -split_path=Utils.split_path -split_path_unix=Utils.split_path_unix -split_path_cygwin=Utils.split_path_cygwin -split_path_win32=Utils.split_path_win32 -class Node(object): - dict_class=dict - __slots__=('name','sig','children','parent','cache_abspath','cache_isdir','cache_sig') - def __init__(self,name,parent): - self.name=name - self.parent=parent - if parent: - if name in parent.children: - raise Errors.WafError('node %s exists in the parent files %r already'%(name,parent)) - parent.children[name]=self - def __setstate__(self,data): - self.name=data[0] - self.parent=data[1] - if data[2]is not None: - self.children=self.dict_class(data[2]) - if data[3]is not None: - self.sig=data[3] - def __getstate__(self): - return(self.name,self.parent,getattr(self,'children',None),getattr(self,'sig',None)) - def __str__(self): - return self.name - def __repr__(self): - return self.abspath() - def __hash__(self): - return id(self) - def __eq__(self,node): - return id(self)==id(node) - def __copy__(self): - raise Errors.WafError('nodes are not supposed to be copied') - def 
read(self,flags='r',encoding='ISO8859-1'): - return Utils.readf(self.abspath(),flags,encoding) - def write(self,data,flags='w',encoding='ISO8859-1'): - Utils.writef(self.abspath(),data,flags,encoding) - def read_json(self,convert=True,encoding='utf-8'): - import json - object_pairs_hook=None - if convert and sys.hexversion<0x3000000: - try: - _type=unicode - except NameError: - _type=str - def convert(value): - if isinstance(value,list): - return[convert(element)for element in value] - elif isinstance(value,_type): - return str(value) - else: - return value - def object_pairs(pairs): - return dict((str(pair[0]),convert(pair[1]))for pair in pairs) - object_pairs_hook=object_pairs - return json.loads(self.read(encoding=encoding),object_pairs_hook=object_pairs_hook) - def write_json(self,data,pretty=True): - import json - indent=2 - separators=(',',': ') - sort_keys=pretty - newline=os.linesep - if not pretty: - indent=None - separators=(',',':') - newline='' - output=json.dumps(data,indent=indent,separators=separators,sort_keys=sort_keys)+newline - self.write(output,encoding='utf-8') - def chmod(self,val): - os.chmod(self.abspath(),val) - def delete(self): - try: - try: - if hasattr(self,'children'): - shutil.rmtree(self.abspath()) - else: - os.remove(self.abspath()) - except OSError as e: - if os.path.exists(self.abspath()): - raise e - finally: - self.evict() - def evict(self): - del self.parent.children[self.name] - def suffix(self): - k=max(0,self.name.rfind('.')) - return self.name[k:] - def height(self): - d=self - val=-1 - while d: - d=d.parent - val+=1 - return val - def listdir(self): - lst=Utils.listdir(self.abspath()) - lst.sort() - return lst - def mkdir(self): - if getattr(self,'cache_isdir',None): - return - try: - self.parent.mkdir() - except OSError: - pass - if self.name: - try: - os.makedirs(self.abspath()) - except OSError: - pass - if not os.path.isdir(self.abspath()): - raise Errors.WafError('Could not create the directory %s'%self.abspath()) - 
try: - self.children - except AttributeError: - self.children=self.dict_class() - self.cache_isdir=True - def find_node(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - cur=self - for x in lst: - if x=='..': - cur=cur.parent or cur - continue - try: - ch=cur.children - except AttributeError: - cur.children=self.dict_class() - else: - try: - cur=ch[x] - continue - except KeyError: - pass - cur=self.__class__(x,cur) - try: - os.stat(cur.abspath()) - except OSError: - cur.evict() - return None - ret=cur - try: - os.stat(ret.abspath()) - except OSError: - ret.evict() - return None - try: - while not getattr(cur.parent,'cache_isdir',None): - cur=cur.parent - cur.cache_isdir=True - except AttributeError: - pass - return ret - def make_node(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - cur=self - for x in lst: - if x=='..': - cur=cur.parent or cur - continue - if getattr(cur,'children',{}): - if x in cur.children: - cur=cur.children[x] - continue - else: - cur.children=self.dict_class() - cur=self.__class__(x,cur) - return cur - def search_node(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - cur=self - for x in lst: - if x=='..': - cur=cur.parent or cur - else: - try: - cur=cur.children[x] - except(AttributeError,KeyError): - return None - return cur - def path_from(self,node): - c1=self - c2=node - c1h=c1.height() - c2h=c2.height() - lst=[] - up=0 - while c1h>c2h: - lst.append(c1.name) - c1=c1.parent - c1h-=1 - while c2h>c1h: - up+=1 - c2=c2.parent - c2h-=1 - while id(c1)!=id(c2): - lst.append(c1.name) - up+=1 - c1=c1.parent - c2=c2.parent - if c1.parent: - for i in range(up): - lst.append('..') - else: - if lst and not Utils.is_win32: - lst.append('') - lst.reverse() - return os.sep.join(lst)or'.' 
- def abspath(self): - try: - return self.cache_abspath - except AttributeError: - pass - if not self.parent: - val=os.sep - elif not self.parent.name: - val=os.sep+self.name - else: - val=self.parent.abspath()+os.sep+self.name - self.cache_abspath=val - return val - if Utils.is_win32: - def abspath(self): - try: - return self.cache_abspath - except AttributeError: - pass - if not self.parent: - val='' - elif not self.parent.name: - val=self.name+os.sep - else: - val=self.parent.abspath().rstrip(os.sep)+os.sep+self.name - self.cache_abspath=val - return val - def is_child_of(self,node): - p=self - diff=self.height()-node.height() - while diff>0: - diff-=1 - p=p.parent - return id(p)==id(node) - def ant_iter(self,accept=None,maxdepth=25,pats=[],dir=False,src=True,remove=True): - dircont=self.listdir() - dircont.sort() - try: - lst=set(self.children.keys()) - except AttributeError: - self.children=self.dict_class() - else: - if remove: - for x in lst-set(dircont): - self.children[x].evict() - for name in dircont: - npats=accept(name,pats) - if npats and npats[0]: - accepted=[]in npats[0] - node=self.make_node([name]) - isdir=os.path.isdir(node.abspath()) - if accepted: - if isdir: - if dir: - yield node - else: - if src: - yield node - if getattr(node,'cache_isdir',None)or isdir: - node.cache_isdir=True - if maxdepth: - for k in node.ant_iter(accept=accept,maxdepth=maxdepth-1,pats=npats,dir=dir,src=src,remove=remove): - yield k - raise StopIteration - def ant_glob(self,*k,**kw): - src=kw.get('src',True) - dir=kw.get('dir',False) - excl=kw.get('excl',exclude_regs) - incl=k and k[0]or kw.get('incl','**') - reflags=kw.get('ignorecase',0)and re.I - def to_pat(s): - lst=Utils.to_list(s) - ret=[] - for x in lst: - x=x.replace('\\','/').replace('//','/') - if x.endswith('/'): - x+='**' - lst2=x.split('/') - accu=[] - for k in lst2: - if k=='**': - accu.append(k) - else: - k=k.replace('.','[.]').replace('*','.*').replace('?','.').replace('+','\\+') - k='^%s$'%k - try: - 
accu.append(re.compile(k,flags=reflags)) - except Exception as e: - raise Errors.WafError("Invalid pattern: %s"%k,e) - ret.append(accu) - return ret - def filtre(name,nn): - ret=[] - for lst in nn: - if not lst: - pass - elif lst[0]=='**': - ret.append(lst) - if len(lst)>1: - if lst[1].match(name): - ret.append(lst[2:]) - else: - ret.append([]) - elif lst[0].match(name): - ret.append(lst[1:]) - return ret - def accept(name,pats): - nacc=filtre(name,pats[0]) - nrej=filtre(name,pats[1]) - if[]in nrej: - nacc=[] - return[nacc,nrej] - ret=[x for x in self.ant_iter(accept=accept,pats=[to_pat(incl),to_pat(excl)],maxdepth=kw.get('maxdepth',25),dir=dir,src=src,remove=kw.get('remove',True))] - if kw.get('flat',False): - return' '.join([x.path_from(self)for x in ret]) - return ret - def is_src(self): - cur=self - x=id(self.ctx.srcnode) - y=id(self.ctx.bldnode) - while cur.parent: - if id(cur)==y: - return False - if id(cur)==x: - return True - cur=cur.parent - return False - def is_bld(self): - cur=self - y=id(self.ctx.bldnode) - while cur.parent: - if id(cur)==y: - return True - cur=cur.parent - return False - def get_src(self): - cur=self - x=id(self.ctx.srcnode) - y=id(self.ctx.bldnode) - lst=[] - while cur.parent: - if id(cur)==y: - lst.reverse() - return self.ctx.srcnode.make_node(lst) - if id(cur)==x: - return self - lst.append(cur.name) - cur=cur.parent - return self - def get_bld(self): - cur=self - x=id(self.ctx.srcnode) - y=id(self.ctx.bldnode) - lst=[] - while cur.parent: - if id(cur)==y: - return self - if id(cur)==x: - lst.reverse() - return self.ctx.bldnode.make_node(lst) - lst.append(cur.name) - cur=cur.parent - lst.reverse() - if lst and Utils.is_win32 and len(lst[0])==2 and lst[0].endswith(':'): - lst[0]=lst[0][0] - return self.ctx.bldnode.make_node(['__root__']+lst) - def find_resource(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - node=self.get_bld().search_node(lst) - if not node: - self=self.get_src() - 
node=self.find_node(lst) - if node: - if os.path.isdir(node.abspath()): - return None - return node - def find_or_declare(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - node=self.get_bld().search_node(lst) - if node: - if not os.path.isfile(node.abspath()): - node.sig=None - node.parent.mkdir() - return node - self=self.get_src() - node=self.find_node(lst) - if node: - if not os.path.isfile(node.abspath()): - node.sig=None - node.parent.mkdir() - return node - node=self.get_bld().make_node(lst) - node.parent.mkdir() - return node - def find_dir(self,lst): - if isinstance(lst,str): - lst=[x for x in split_path(lst)if x and x!='.'] - node=self.find_node(lst) - try: - if not os.path.isdir(node.abspath()): - return None - except(OSError,AttributeError): - return None - return node - def change_ext(self,ext,ext_in=None): - name=self.name - if ext_in is None: - k=name.rfind('.') - if k>=0: - name=name[:k]+ext - else: - name=name+ext - else: - name=name[:-len(ext_in)]+ext - return self.parent.find_or_declare([name]) - def bldpath(self): - return self.path_from(self.ctx.bldnode) - def srcpath(self): - return self.path_from(self.ctx.srcnode) - def relpath(self): - cur=self - x=id(self.ctx.bldnode) - while cur.parent: - if id(cur)==x: - return self.bldpath() - cur=cur.parent - return self.srcpath() - def bld_dir(self): - return self.parent.bldpath() - def get_bld_sig(self): - try: - return self.cache_sig - except AttributeError: - pass - if not self.is_bld()or self.ctx.bldnode is self.ctx.srcnode: - self.sig=Utils.h_file(self.abspath()) - self.cache_sig=ret=self.sig - return ret -pickle_lock=Utils.threading.Lock() -class Nod3(Node): - pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py deleted file mode 100644 index 5101f5fc..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Options.py +++ /dev/null @@ -1,147 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,tempfile,optparse,sys,re -from waflib import Logs,Utils,Context -cmds='distclean configure build install clean uninstall check dist distcheck'.split() -options={} -commands=[] -envvars=[] -lockfile=os.environ.get('WAFLOCK','.lock-waf_%s_build'%sys.platform) -platform=Utils.unversioned_sys_platform() -class opt_parser(optparse.OptionParser): - def __init__(self,ctx): - optparse.OptionParser.__init__(self,conflict_handler="resolve",version='waf %s (%s)'%(Context.WAFVERSION,Context.WAFREVISION)) - self.formatter.width=Logs.get_term_cols() - self.ctx=ctx - def print_usage(self,file=None): - return self.print_help(file) - def get_usage(self): - cmds_str={} - for cls in Context.classes: - if not cls.cmd or cls.cmd=='options'or cls.cmd.startswith('_'): - continue - s=cls.__doc__ or'' - cmds_str[cls.cmd]=s - if Context.g_module: - for(k,v)in Context.g_module.__dict__.items(): - if k in('options','init','shutdown'): - continue - if type(v)is type(Context.create_context): - if v.__doc__ and not k.startswith('_'): - cmds_str[k]=v.__doc__ - just=0 - for k in cmds_str: - just=max(just,len(k)) - lst=[' %s: %s'%(k.ljust(just),v)for(k,v)in cmds_str.items()] - lst.sort() - ret='\n'.join(lst) - return'''waf [commands] [options] - -Main commands (example: ./waf build -j4) -%s -'''%ret -class OptionsContext(Context.Context): - cmd='options' - fun='options' - def __init__(self,**kw): - super(OptionsContext,self).__init__(**kw) - self.parser=opt_parser(self) - self.option_groups={} - jobs=self.jobs() - p=self.add_option - color=os.environ.get('NOCOLOR','')and'no'or'auto' - p('-c','--color',dest='colors',default=color,action='store',help='whether to use colors (yes/no/auto) [default: auto]',choices=('yes','no','auto')) - p('-j','--jobs',dest='jobs',default=jobs,type='int',help='amount of parallel jobs (%r)'%jobs) - 
p('-k','--keep',dest='keep',default=0,action='count',help='continue despite errors (-kk to try harder)') - p('-v','--verbose',dest='verbose',default=0,action='count',help='verbosity level -v -vv or -vvv [default: 0]') - p('--zones',dest='zones',default='',action='store',help='debugging zones (task_gen, deps, tasks, etc)') - gr=self.add_option_group('Configuration options') - self.option_groups['configure options']=gr - gr.add_option('-o','--out',action='store',default='',help='build dir for the project',dest='out') - gr.add_option('-t','--top',action='store',default='',help='src dir for the project',dest='top') - gr.add_option('--no-lock-in-run',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_run') - gr.add_option('--no-lock-in-out',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_out') - gr.add_option('--no-lock-in-top',action='store_true',default='',help=optparse.SUPPRESS_HELP,dest='no_lock_in_top') - default_prefix=getattr(Context.g_module,'default_prefix',os.environ.get('PREFIX')) - if not default_prefix: - if platform=='win32': - d=tempfile.gettempdir() - default_prefix=d[0].upper()+d[1:] - else: - default_prefix='/usr/local/' - gr.add_option('--prefix',dest='prefix',default=default_prefix,help='installation prefix [default: %r]'%default_prefix) - gr.add_option('--bindir',dest='bindir',help='bindir') - gr.add_option('--libdir',dest='libdir',help='libdir') - gr=self.add_option_group('Build and installation options') - self.option_groups['build and install options']=gr - gr.add_option('-p','--progress',dest='progress_bar',default=0,action='count',help='-p: progress bar; -pp: ide output') - gr.add_option('--targets',dest='targets',default='',action='store',help='task generators, e.g. "target1,target2"') - gr=self.add_option_group('Step options') - self.option_groups['step options']=gr - gr.add_option('--files',dest='files',default='',action='store',help='files to process, by regexp, e.g. 
"*/main.c,*/test/main.o"') - default_destdir=os.environ.get('DESTDIR','') - gr=self.add_option_group('Installation and uninstallation options') - self.option_groups['install/uninstall options']=gr - gr.add_option('--destdir',help='installation root [default: %r]'%default_destdir,default=default_destdir,dest='destdir') - gr.add_option('-f','--force',dest='force',default=False,action='store_true',help='force file installation') - gr.add_option('--distcheck-args',metavar='ARGS',help='arguments to pass to distcheck',default=None,action='store') - def jobs(self): - count=int(os.environ.get('JOBS',0)) - if count<1: - if'NUMBER_OF_PROCESSORS'in os.environ: - count=int(os.environ.get('NUMBER_OF_PROCESSORS',1)) - else: - if hasattr(os,'sysconf_names'): - if'SC_NPROCESSORS_ONLN'in os.sysconf_names: - count=int(os.sysconf('SC_NPROCESSORS_ONLN')) - elif'SC_NPROCESSORS_CONF'in os.sysconf_names: - count=int(os.sysconf('SC_NPROCESSORS_CONF')) - if not count and os.name not in('nt','java'): - try: - tmp=self.cmd_and_log(['sysctl','-n','hw.ncpu'],quiet=0) - except Exception: - pass - else: - if re.match('^[0-9]+$',tmp): - count=int(tmp) - if count<1: - count=1 - elif count>1024: - count=1024 - return count - def add_option(self,*k,**kw): - return self.parser.add_option(*k,**kw) - def add_option_group(self,*k,**kw): - try: - gr=self.option_groups[k[0]] - except KeyError: - gr=self.parser.add_option_group(*k,**kw) - self.option_groups[k[0]]=gr - return gr - def get_option_group(self,opt_str): - try: - return self.option_groups[opt_str] - except KeyError: - for group in self.parser.option_groups: - if group.title==opt_str: - return group - return None - def parse_args(self,_args=None): - global options,commands,envvars - (options,leftover_args)=self.parser.parse_args(args=_args) - for arg in leftover_args: - if'='in arg: - envvars.append(arg) - else: - commands.append(arg) - if options.destdir: - options.destdir=Utils.sane_path(options.destdir) - if options.verbose>=1: - 
self.load('errcheck') - colors={'yes':2,'auto':1,'no':0}[options.colors] - Logs.enable_colors(colors) - def execute(self): - super(OptionsContext,self).execute() - self.parse_args() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py deleted file mode 100644 index ab661a22..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Runner.py +++ /dev/null @@ -1,207 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import random,atexit -try: - from queue import Queue -except ImportError: - from Queue import Queue -from waflib import Utils,Task,Errors,Logs -GAP=10 -class TaskConsumer(Utils.threading.Thread): - def __init__(self): - Utils.threading.Thread.__init__(self) - self.ready=Queue() - self.setDaemon(1) - self.start() - def run(self): - try: - self.loop() - except Exception: - pass - def loop(self): - while 1: - tsk=self.ready.get() - if not isinstance(tsk,Task.TaskBase): - tsk(self) - else: - tsk.process() -pool=Queue() -def get_pool(): - try: - return pool.get(False) - except Exception: - return TaskConsumer() -def put_pool(x): - pool.put(x) -def _free_resources(): - global pool - lst=[] - while pool.qsize(): - lst.append(pool.get()) - for x in lst: - x.ready.put(None) - for x in lst: - x.join() - pool=None -atexit.register(_free_resources) -class Parallel(object): - def __init__(self,bld,j=2): - self.numjobs=j - self.bld=bld - self.outstanding=[] - self.frozen=[] - self.out=Queue(0) - self.count=0 - self.processed=1 - self.stop=False - self.error=[] - self.biter=None - self.dirty=False - def get_next_task(self): - if not self.outstanding: - return None - return self.outstanding.pop(0) - def postpone(self,tsk): - if random.randint(0,1): - self.frozen.insert(0,tsk) - else: - self.frozen.append(tsk) - def refill_task_list(self): - while self.count>self.numjobs*GAP: - self.get_out() 
- while not self.outstanding: - if self.count: - self.get_out() - elif self.frozen: - try: - cond=self.deadlock==self.processed - except AttributeError: - pass - else: - if cond: - msg='check the build order for the tasks' - for tsk in self.frozen: - if not tsk.run_after: - msg='check the methods runnable_status' - break - lst=[] - for tsk in self.frozen: - lst.append('%s\t-> %r'%(repr(tsk),[id(x)for x in tsk.run_after])) - raise Errors.WafError('Deadlock detected: %s%s'%(msg,''.join(lst))) - self.deadlock=self.processed - if self.frozen: - self.outstanding+=self.frozen - self.frozen=[] - elif not self.count: - self.outstanding.extend(next(self.biter)) - self.total=self.bld.total() - break - def add_more_tasks(self,tsk): - if getattr(tsk,'more_tasks',None): - self.outstanding+=tsk.more_tasks - self.total+=len(tsk.more_tasks) - def get_out(self): - tsk=self.out.get() - if not self.stop: - self.add_more_tasks(tsk) - self.count-=1 - self.dirty=True - return tsk - def add_task(self,tsk): - try: - self.pool - except AttributeError: - self.init_task_pool() - self.ready.put(tsk) - def init_task_pool(self): - pool=self.pool=[get_pool()for i in range(self.numjobs)] - self.ready=Queue(0) - def setq(consumer): - consumer.ready=self.ready - for x in pool: - x.ready.put(setq) - return pool - def free_task_pool(self): - def setq(consumer): - consumer.ready=Queue(0) - self.out.put(self) - try: - pool=self.pool - except AttributeError: - pass - else: - for x in pool: - self.ready.put(setq) - for x in pool: - self.get_out() - for x in pool: - put_pool(x) - self.pool=[] - def skip(self,tsk): - tsk.hasrun=Task.SKIPPED - def error_handler(self,tsk): - if hasattr(tsk,'scan')and hasattr(tsk,'uid'): - key=(tsk.uid(),'imp') - try: - del self.bld.task_sigs[key] - except KeyError: - pass - if not self.bld.keep: - self.stop=True - self.error.append(tsk) - def task_status(self,tsk): - try: - return tsk.runnable_status() - except Exception: - self.processed+=1 - tsk.err_msg=Utils.ex_stack() - 
if not self.stop and self.bld.keep: - self.skip(tsk) - if self.bld.keep==1: - if Logs.verbose>1 or not self.error: - self.error.append(tsk) - self.stop=True - else: - if Logs.verbose>1: - self.error.append(tsk) - return Task.EXCEPTION - tsk.hasrun=Task.EXCEPTION - self.error_handler(tsk) - return Task.EXCEPTION - def start(self): - self.total=self.bld.total() - while not self.stop: - self.refill_task_list() - tsk=self.get_next_task() - if not tsk: - if self.count: - continue - else: - break - if tsk.hasrun: - self.processed+=1 - continue - if self.stop: - break - st=self.task_status(tsk) - if st==Task.RUN_ME: - tsk.position=(self.processed,self.total) - self.count+=1 - tsk.master=self - self.processed+=1 - if self.numjobs==1: - tsk.process() - else: - self.add_task(tsk) - if st==Task.ASK_LATER: - self.postpone(tsk) - elif st==Task.SKIP_ME: - self.processed+=1 - self.skip(tsk) - self.add_more_tasks(tsk) - while self.error and self.count: - self.get_out() - assert(self.count==0 or self.stop) - self.free_task_pool() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py deleted file mode 100644 index dcbab750..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Scripting.py +++ /dev/null @@ -1,407 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,shlex,shutil,traceback,errno,sys,stat -from waflib import Utils,Configure,Logs,Options,ConfigSet,Context,Errors,Build,Node -build_dir_override=None -no_climb_commands=['configure'] -default_cmd="build" -def waf_entry_point(current_directory,version,wafdir): - Logs.init_log() - if Context.WAFVERSION!=version: - Logs.error('Waf script %r and library %r do not match (directory %r)'%(version,Context.WAFVERSION,wafdir)) - sys.exit(1) - if'--version'in sys.argv: - Context.run_dir=current_directory - ctx=Context.create_context('options') - ctx.curdir=current_directory - ctx.parse_args() - sys.exit(0) - if len(sys.argv)>1: - potential_wscript=os.path.join(current_directory,sys.argv[1]) - if os.path.basename(potential_wscript)=='wscript'and os.path.isfile(potential_wscript): - current_directory=os.path.normpath(os.path.dirname(potential_wscript)) - sys.argv.pop(1) - Context.waf_dir=wafdir - Context.launch_dir=current_directory - no_climb=os.environ.get('NOCLIMB',None) - if not no_climb: - for k in no_climb_commands: - for y in sys.argv: - if y.startswith(k): - no_climb=True - break - for i,x in enumerate(sys.argv): - if x.startswith('--top='): - Context.run_dir=Context.top_dir=Utils.sane_path(x[6:]) - sys.argv[i]='--top='+Context.run_dir - if x.startswith('--out='): - Context.out_dir=Utils.sane_path(x[6:]) - sys.argv[i]='--out='+Context.out_dir - cur=current_directory - while cur and not Context.top_dir: - lst=os.listdir(cur) - if Options.lockfile in lst: - env=ConfigSet.ConfigSet() - try: - env.load(os.path.join(cur,Options.lockfile)) - ino=os.stat(cur)[stat.ST_INO] - except Exception: - pass - else: - for x in(env.run_dir,env.top_dir,env.out_dir): - if Utils.is_win32: - if cur==x: - load=True - break - else: - try: - ino2=os.stat(x)[stat.ST_INO] - except OSError: - pass - else: - if ino==ino2: - load=True - break - else: - Logs.warn('invalid lock file in %s'%cur) - load=False - if load: - 
Context.run_dir=env.run_dir - Context.top_dir=env.top_dir - Context.out_dir=env.out_dir - break - if not Context.run_dir: - if Context.WSCRIPT_FILE in lst: - Context.run_dir=cur - next=os.path.dirname(cur) - if next==cur: - break - cur=next - if no_climb: - break - if not Context.run_dir: - if'-h'in sys.argv or'--help'in sys.argv: - Logs.warn('No wscript file found: the help message may be incomplete') - Context.run_dir=current_directory - ctx=Context.create_context('options') - ctx.curdir=current_directory - ctx.parse_args() - sys.exit(0) - Logs.error('Waf: Run from a directory containing a file named %r'%Context.WSCRIPT_FILE) - sys.exit(1) - try: - os.chdir(Context.run_dir) - except OSError: - Logs.error('Waf: The folder %r is unreadable'%Context.run_dir) - sys.exit(1) - try: - set_main_module(os.path.normpath(os.path.join(Context.run_dir,Context.WSCRIPT_FILE))) - except Errors.WafError as e: - Logs.pprint('RED',e.verbose_msg) - Logs.error(str(e)) - sys.exit(1) - except Exception as e: - Logs.error('Waf: The wscript in %r is unreadable'%Context.run_dir,e) - traceback.print_exc(file=sys.stdout) - sys.exit(2) - try: - run_commands() - except Errors.WafError as e: - if Logs.verbose>1: - Logs.pprint('RED',e.verbose_msg) - Logs.error(e.msg) - sys.exit(1) - except SystemExit: - raise - except Exception as e: - traceback.print_exc(file=sys.stdout) - sys.exit(2) - except KeyboardInterrupt: - Logs.pprint('RED','Interrupted') - sys.exit(68) -def set_main_module(file_path): - Context.g_module=Context.load_module(file_path) - Context.g_module.root_path=file_path - def set_def(obj): - name=obj.__name__ - if not name in Context.g_module.__dict__: - setattr(Context.g_module,name,obj) - for k in(update,dist,distclean,distcheck): - set_def(k) - if not'init'in Context.g_module.__dict__: - Context.g_module.init=Utils.nada - if not'shutdown'in Context.g_module.__dict__: - Context.g_module.shutdown=Utils.nada - if not'options'in Context.g_module.__dict__: - 
Context.g_module.options=Utils.nada -def parse_options(): - Context.create_context('options').execute() - for var in Options.envvars: - (name,value)=var.split('=',1) - os.environ[name.strip()]=value - if not Options.commands: - Options.commands=[default_cmd] - Options.commands=[x for x in Options.commands if x!='options'] - Logs.verbose=Options.options.verbose - if Options.options.zones: - Logs.zones=Options.options.zones.split(',') - if not Logs.verbose: - Logs.verbose=1 - elif Logs.verbose>0: - Logs.zones=['runner'] - if Logs.verbose>2: - Logs.zones=['*'] -def run_command(cmd_name): - ctx=Context.create_context(cmd_name) - ctx.log_timer=Utils.Timer() - ctx.options=Options.options - ctx.cmd=cmd_name - try: - ctx.execute() - finally: - ctx.finalize() - return ctx -def run_commands(): - parse_options() - run_command('init') - while Options.commands: - cmd_name=Options.commands.pop(0) - ctx=run_command(cmd_name) - Logs.info('%r finished successfully (%s)'%(cmd_name,str(ctx.log_timer))) - run_command('shutdown') -def _can_distclean(name): - for k in'.o .moc .exe'.split(): - if name.endswith(k): - return True - return False -def distclean_dir(dirname): - for(root,dirs,files)in os.walk(dirname): - for f in files: - if _can_distclean(f): - fname=os.path.join(root,f) - try: - os.remove(fname) - except OSError: - Logs.warn('Could not remove %r'%fname) - for x in(Context.DBFILE,'config.log'): - try: - os.remove(x) - except OSError: - pass - try: - shutil.rmtree('c4che') - except OSError: - pass -def distclean(ctx): - '''removes the build directory''' - lst=os.listdir('.') - for f in lst: - if f==Options.lockfile: - try: - proj=ConfigSet.ConfigSet(f) - except IOError: - Logs.warn('Could not read %r'%f) - continue - if proj['out_dir']!=proj['top_dir']: - try: - shutil.rmtree(proj['out_dir']) - except IOError: - pass - except OSError as e: - if e.errno!=errno.ENOENT: - Logs.warn('Could not remove %r'%proj['out_dir']) - else: - distclean_dir(proj['out_dir']) - for k 
in(proj['out_dir'],proj['top_dir'],proj['run_dir']): - p=os.path.join(k,Options.lockfile) - try: - os.remove(p) - except OSError as e: - if e.errno!=errno.ENOENT: - Logs.warn('Could not remove %r'%p) - if not Options.commands: - for x in'.waf-1. waf-1. .waf3-1. waf3-1.'.split(): - if f.startswith(x): - shutil.rmtree(f,ignore_errors=True) -class Dist(Context.Context): - '''creates an archive containing the project source code''' - cmd='dist' - fun='dist' - algo='tar.bz2' - ext_algo={} - def execute(self): - self.recurse([os.path.dirname(Context.g_module.root_path)]) - self.archive() - def archive(self): - import tarfile - arch_name=self.get_arch_name() - try: - self.base_path - except AttributeError: - self.base_path=self.path - node=self.base_path.make_node(arch_name) - try: - node.delete() - except OSError: - pass - files=self.get_files() - if self.algo.startswith('tar.'): - tar=tarfile.open(arch_name,'w:'+self.algo.replace('tar.','')) - for x in files: - self.add_tar_file(x,tar) - tar.close() - elif self.algo=='zip': - import zipfile - zip=zipfile.ZipFile(arch_name,'w',compression=zipfile.ZIP_DEFLATED) - for x in files: - archive_name=self.get_base_name()+'/'+x.path_from(self.base_path) - zip.write(x.abspath(),archive_name,zipfile.ZIP_DEFLATED) - zip.close() - else: - self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip') - try: - from hashlib import sha1 as sha - except ImportError: - from sha import sha - try: - digest=" (sha=%r)"%sha(node.read()).hexdigest() - except Exception: - digest='' - Logs.info('New archive created: %s%s'%(self.arch_name,digest)) - def get_tar_path(self,node): - return node.abspath() - def add_tar_file(self,x,tar): - p=self.get_tar_path(x) - tinfo=tar.gettarinfo(name=p,arcname=self.get_tar_prefix()+'/'+x.path_from(self.base_path)) - tinfo.uid=0 - tinfo.gid=0 - tinfo.uname='root' - tinfo.gname='root' - fu=None - try: - fu=open(p,'rb') - tar.addfile(tinfo,fileobj=fu) - finally: - if fu: - fu.close() - def get_tar_prefix(self): 
- try: - return self.tar_prefix - except AttributeError: - return self.get_base_name() - def get_arch_name(self): - try: - self.arch_name - except AttributeError: - self.arch_name=self.get_base_name()+'.'+self.ext_algo.get(self.algo,self.algo) - return self.arch_name - def get_base_name(self): - try: - self.base_name - except AttributeError: - appname=getattr(Context.g_module,Context.APPNAME,'noname') - version=getattr(Context.g_module,Context.VERSION,'1.0') - self.base_name=appname+'-'+version - return self.base_name - def get_excl(self): - try: - return self.excl - except AttributeError: - self.excl=Node.exclude_regs+' **/waf-1.8.* **/.waf-1.8* **/waf3-1.8.* **/.waf3-1.8* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*' - if Context.out_dir: - nd=self.root.find_node(Context.out_dir) - if nd: - self.excl+=' '+nd.path_from(self.base_path) - return self.excl - def get_files(self): - try: - files=self.files - except AttributeError: - files=self.base_path.ant_glob('**/*',excl=self.get_excl()) - return files -def dist(ctx): - '''makes a tarball for redistributing the sources''' - pass -class DistCheck(Dist): - fun='distcheck' - cmd='distcheck' - def execute(self): - self.recurse([os.path.dirname(Context.g_module.root_path)]) - self.archive() - self.check() - def check(self): - import tempfile,tarfile - t=None - try: - t=tarfile.open(self.get_arch_name()) - for x in t: - t.extract(x) - finally: - if t: - t.close() - cfg=[] - if Options.options.distcheck_args: - cfg=shlex.split(Options.options.distcheck_args) - else: - cfg=[x for x in sys.argv if x.startswith('-')] - instdir=tempfile.mkdtemp('.inst',self.get_base_name()) - ret=Utils.subprocess.Popen([sys.executable,sys.argv[0],'configure','install','uninstall','--destdir='+instdir]+cfg,cwd=self.get_base_name()).wait() - if ret: - raise Errors.WafError('distcheck failed with code %i'%ret) - if os.path.exists(instdir): - raise Errors.WafError('distcheck succeeded, but files were left in 
%s'%instdir) - shutil.rmtree(self.get_base_name()) -def distcheck(ctx): - '''checks if the project compiles (tarball from 'dist')''' - pass -def update(ctx): - lst=Options.options.files - if lst: - lst=lst.split(',') - else: - path=os.path.join(Context.waf_dir,'waflib','extras') - lst=[x for x in Utils.listdir(path)if x.endswith('.py')] - for x in lst: - tool=x.replace('.py','') - if not tool: - continue - try: - dl=Configure.download_tool - except AttributeError: - ctx.fatal('The command "update" is dangerous; include the tool "use_config" in your project!') - try: - dl(tool,force=True,ctx=ctx) - except Errors.WafError: - Logs.error('Could not find the tool %r in the remote repository'%x) - else: - Logs.warn('Updated %r'%tool) -def autoconfigure(execute_method): - def execute(self): - if not Configure.autoconfig: - return execute_method(self) - env=ConfigSet.ConfigSet() - do_config=False - try: - env.load(os.path.join(Context.top_dir,Options.lockfile)) - except Exception: - Logs.warn('Configuring the project') - do_config=True - else: - if env.run_dir!=Context.run_dir: - do_config=True - else: - h=0 - for f in env['files']: - h=Utils.h_list((h,Utils.readf(f,'rb'))) - do_config=h!=env.hash - if do_config: - Options.commands.insert(0,self.cmd) - Options.commands.insert(0,'configure') - if Configure.autoconfig=='clobber': - Options.options.__dict__=env.options - return - return execute_method(self) - return execute -Build.BuildContext.execute=autoconfigure(Build.BuildContext.execute) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py deleted file mode 100644 index 89ed5f62..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Task.py +++ /dev/null @@ -1,686 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re,sys -from waflib import Utils,Logs,Errors -NOT_RUN=0 -MISSING=1 -CRASHED=2 -EXCEPTION=3 -SKIPPED=8 -SUCCESS=9 -ASK_LATER=-1 -SKIP_ME=-2 -RUN_ME=-3 -COMPILE_TEMPLATE_SHELL=''' -def f(tsk): - env = tsk.env - gen = tsk.generator - bld = gen.bld - cwdx = getattr(bld, 'cwdx', bld.bldnode) # TODO single cwd value in waf 1.9 - wd = getattr(tsk, 'cwd', None) - p = env.get_flat - tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s - return tsk.exec_command(cmd, cwd=wd, env=env.env or None) -''' -COMPILE_TEMPLATE_NOSHELL=''' -def f(tsk): - env = tsk.env - gen = tsk.generator - bld = gen.bld - cwdx = getattr(bld, 'cwdx', bld.bldnode) # TODO single cwd value in waf 1.9 - wd = getattr(tsk, 'cwd', None) - def to_list(xx): - if isinstance(xx, str): return [xx] - return xx - tsk.last_cmd = lst = [] - %s - lst = [x for x in lst if x] - return tsk.exec_command(lst, cwd=wd, env=env.env or None) -''' -classes={} -class store_task_type(type): - def __init__(cls,name,bases,dict): - super(store_task_type,cls).__init__(name,bases,dict) - name=cls.__name__ - if name.endswith('_task'): - name=name.replace('_task','') - if name!='evil'and name!='TaskBase': - global classes - if getattr(cls,'run_str',None): - (f,dvars)=compile_fun(cls.run_str,cls.shell) - cls.hcode=Utils.h_cmd(cls.run_str) - cls.orig_run_str=cls.run_str - cls.run_str=None - cls.run=f - cls.vars=list(set(cls.vars+dvars)) - cls.vars.sort() - elif getattr(cls,'run',None)and not'hcode'in cls.__dict__: - cls.hcode=Utils.h_cmd(cls.run) - getattr(cls,'register',classes)[name]=cls -evil=store_task_type('evil',(object,),{}) -class TaskBase(evil): - color='GREEN' - ext_in=[] - ext_out=[] - before=[] - after=[] - hcode='' - def __init__(self,*k,**kw): - self.hasrun=NOT_RUN - try: - self.generator=kw['generator'] - except KeyError: - self.generator=self - def __repr__(self): - return'\n\t{task %r: %s %s}'%(self.__class__.__name__,id(self),str(getattr(self,'fun',''))) - def 
__str__(self): - if hasattr(self,'fun'): - return self.fun.__name__ - return self.__class__.__name__ - def __hash__(self): - return id(self) - def keyword(self): - if hasattr(self,'fun'): - return'Function' - return'Processing' - def exec_command(self,cmd,**kw): - bld=self.generator.bld - try: - if not kw.get('cwd',None): - kw['cwd']=bld.cwd - except AttributeError: - bld.cwd=kw['cwd']=bld.variant_dir - return bld.exec_command(cmd,**kw) - def runnable_status(self): - return RUN_ME - def process(self): - m=self.master - if m.stop: - m.out.put(self) - return - try: - del self.generator.bld.task_sigs[self.uid()] - except KeyError: - pass - try: - self.generator.bld.returned_tasks.append(self) - self.log_display(self.generator.bld) - ret=self.run() - except Exception: - self.err_msg=Utils.ex_stack() - self.hasrun=EXCEPTION - m.error_handler(self) - m.out.put(self) - return - if ret: - self.err_code=ret - self.hasrun=CRASHED - else: - try: - self.post_run() - except Errors.WafError: - pass - except Exception: - self.err_msg=Utils.ex_stack() - self.hasrun=EXCEPTION - else: - self.hasrun=SUCCESS - if self.hasrun!=SUCCESS: - m.error_handler(self) - m.out.put(self) - def run(self): - if hasattr(self,'fun'): - return self.fun(self) - return 0 - def post_run(self): - pass - def log_display(self,bld): - if self.generator.bld.progress_bar==3: - return - s=self.display() - if s: - if bld.logger: - logger=bld.logger - else: - logger=Logs - if self.generator.bld.progress_bar==1: - c1=Logs.colors.cursor_off - c2=Logs.colors.cursor_on - logger.info(s,extra={'stream':sys.stderr,'terminator':'','c1':c1,'c2':c2}) - else: - logger.info(s,extra={'terminator':'','c1':'','c2':''}) - def display(self): - col1=Logs.colors(self.color) - col2=Logs.colors.NORMAL - master=self.master - def cur(): - tmp=-1 - if hasattr(master,'ready'): - tmp-=master.ready.qsize() - return master.processed+tmp - if self.generator.bld.progress_bar==1: - return 
self.generator.bld.progress_line(cur(),master.total,col1,col2) - if self.generator.bld.progress_bar==2: - ela=str(self.generator.bld.timer) - try: - ins=','.join([n.name for n in self.inputs]) - except AttributeError: - ins='' - try: - outs=','.join([n.name for n in self.outputs]) - except AttributeError: - outs='' - return'|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n'%(master.total,cur(),ins,outs,ela) - s=str(self) - if not s: - return None - total=master.total - n=len(str(total)) - fs='[%%%dd/%%%dd] %%s%%s%%s%%s\n'%(n,n) - kw=self.keyword() - if kw: - kw+=' ' - return fs%(cur(),total,kw,col1,s,col2) - def attr(self,att,default=None): - ret=getattr(self,att,self) - if ret is self:return getattr(self.__class__,att,default) - return ret - def hash_constraints(self): - cls=self.__class__ - tup=(str(cls.before),str(cls.after),str(cls.ext_in),str(cls.ext_out),cls.__name__,cls.hcode) - h=hash(tup) - return h - def format_error(self): - msg=getattr(self,'last_cmd','') - name=getattr(self.generator,'name','') - if getattr(self,"err_msg",None): - return self.err_msg - elif not self.hasrun: - return'task in %r was not executed for some reason: %r'%(name,self) - elif self.hasrun==CRASHED: - try: - return' -> task in %r failed (exit status %r): %r\n%r'%(name,self.err_code,self,msg) - except AttributeError: - return' -> task in %r failed: %r\n%r'%(name,self,msg) - elif self.hasrun==MISSING: - return' -> missing files in %r: %r\n%r'%(name,self,msg) - else: - return'invalid status for task in %r: %r'%(name,self.hasrun) - def colon(self,var1,var2): - tmp=self.env[var1] - if not tmp: - return[] - if isinstance(var2,str): - it=self.env[var2] - else: - it=var2 - if isinstance(tmp,str): - return[tmp%x for x in it] - else: - lst=[] - for y in it: - lst.extend(tmp) - lst.append(y) - return lst -class Task(TaskBase): - vars=[] - shell=False - def __init__(self,*k,**kw): - TaskBase.__init__(self,*k,**kw) - self.env=kw['env'] - self.inputs=[] - self.outputs=[] - self.dep_nodes=[] 
- self.run_after=set([]) - def __str__(self): - name=self.__class__.__name__ - if self.outputs: - if(name.endswith('lib')or name.endswith('program'))or not self.inputs: - node=self.outputs[0] - return node.path_from(node.ctx.launch_node()) - if not(self.inputs or self.outputs): - return self.__class__.__name__ - if len(self.inputs)==1: - node=self.inputs[0] - return node.path_from(node.ctx.launch_node()) - src_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.inputs]) - tgt_str=' '.join([a.path_from(a.ctx.launch_node())for a in self.outputs]) - if self.outputs:sep=' -> ' - else:sep='' - return'%s: %s%s%s'%(self.__class__.__name__.replace('_task',''),src_str,sep,tgt_str) - def keyword(self): - name=self.__class__.__name__ - if name.endswith('lib')or name.endswith('program'): - return'Linking' - if len(self.inputs)==1 and len(self.outputs)==1: - return'Compiling' - if not self.inputs: - if self.outputs: - return'Creating' - else: - return'Running' - return'Processing' - def __repr__(self): - try: - ins=",".join([x.name for x in self.inputs]) - outs=",".join([x.name for x in self.outputs]) - except AttributeError: - ins=",".join([str(x)for x in self.inputs]) - outs=",".join([str(x)for x in self.outputs]) - return"".join(['\n\t{task %r: '%id(self),self.__class__.__name__," ",ins," -> ",outs,'}']) - def uid(self): - try: - return self.uid_ - except AttributeError: - m=Utils.md5() - up=m.update - up(self.__class__.__name__) - for x in self.inputs+self.outputs: - up(x.abspath()) - self.uid_=m.digest() - return self.uid_ - def set_inputs(self,inp): - if isinstance(inp,list):self.inputs+=inp - else:self.inputs.append(inp) - def set_outputs(self,out): - if isinstance(out,list):self.outputs+=out - else:self.outputs.append(out) - def set_run_after(self,task): - assert isinstance(task,TaskBase) - self.run_after.add(task) - def signature(self): - try:return self.cache_sig - except AttributeError:pass - self.m=Utils.md5() - self.m.update(self.hcode) - 
self.sig_explicit_deps() - self.sig_vars() - if self.scan: - try: - self.sig_implicit_deps() - except Errors.TaskRescan: - return self.signature() - ret=self.cache_sig=self.m.digest() - return ret - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return ASK_LATER - bld=self.generator.bld - try: - new_sig=self.signature() - except Errors.TaskNotReady: - return ASK_LATER - key=self.uid() - try: - prev_sig=bld.task_sigs[key] - except KeyError: - Logs.debug("task: task %r must run as it was never run before or the task code changed"%self) - return RUN_ME - for node in self.outputs: - try: - if node.sig!=new_sig: - return RUN_ME - except AttributeError: - Logs.debug("task: task %r must run as the output nodes do not exist"%self) - return RUN_ME - if new_sig!=prev_sig: - return RUN_ME - return SKIP_ME - def post_run(self): - bld=self.generator.bld - sig=self.signature() - for node in self.outputs: - try: - os.stat(node.abspath()) - except OSError: - self.hasrun=MISSING - self.err_msg='-> missing file: %r'%node.abspath() - raise Errors.WafError(self.err_msg) - node.sig=node.cache_sig=sig - bld.task_sigs[self.uid()]=self.cache_sig - def sig_explicit_deps(self): - bld=self.generator.bld - upd=self.m.update - for x in self.inputs+self.dep_nodes: - try: - upd(x.get_bld_sig()) - except(AttributeError,TypeError): - raise Errors.WafError('Missing node signature for %r (required by %r)'%(x,self)) - if bld.deps_man: - additional_deps=bld.deps_man - for x in self.inputs+self.outputs: - try: - d=additional_deps[id(x)] - except KeyError: - continue - for v in d: - if isinstance(v,bld.root.__class__): - try: - v=v.get_bld_sig() - except AttributeError: - raise Errors.WafError('Missing node signature for %r (required by %r)'%(v,self)) - elif hasattr(v,'__call__'): - v=v() - upd(v) - return self.m.digest() - def sig_vars(self): - bld=self.generator.bld - env=self.env - upd=self.m.update - act_sig=bld.hash_env_vars(env,self.__class__.vars) - upd(act_sig) - 
dep_vars=getattr(self,'dep_vars',None) - if dep_vars: - upd(bld.hash_env_vars(env,dep_vars)) - return self.m.digest() - scan=None - def sig_implicit_deps(self): - bld=self.generator.bld - key=self.uid() - prev=bld.task_sigs.get((key,'imp'),[]) - if prev: - try: - if prev==self.compute_sig_implicit_deps(): - return prev - except Errors.TaskNotReady: - raise - except EnvironmentError: - for x in bld.node_deps.get(self.uid(),[]): - if not x.is_bld(): - try: - os.stat(x.abspath()) - except OSError: - try: - del x.parent.children[x.name] - except KeyError: - pass - del bld.task_sigs[(key,'imp')] - raise Errors.TaskRescan('rescan') - (nodes,names)=self.scan() - if Logs.verbose: - Logs.debug('deps: scanner for %s returned %s %s'%(str(self),str(nodes),str(names))) - bld.node_deps[key]=nodes - bld.raw_deps[key]=names - self.are_implicit_nodes_ready() - try: - bld.task_sigs[(key,'imp')]=sig=self.compute_sig_implicit_deps() - except Exception: - if Logs.verbose: - for k in bld.node_deps.get(self.uid(),[]): - try: - k.get_bld_sig() - except Exception: - Logs.warn('Missing signature for node %r (may cause rebuilds)'%k) - else: - return sig - def compute_sig_implicit_deps(self): - upd=self.m.update - bld=self.generator.bld - self.are_implicit_nodes_ready() - for k in bld.node_deps.get(self.uid(),[]): - upd(k.get_bld_sig()) - return self.m.digest() - def are_implicit_nodes_ready(self): - bld=self.generator.bld - try: - cache=bld.dct_implicit_nodes - except AttributeError: - bld.dct_implicit_nodes=cache={} - try: - dct=cache[bld.cur] - except KeyError: - dct=cache[bld.cur]={} - for tsk in bld.cur_tasks: - for x in tsk.outputs: - dct[x]=tsk - modified=False - for x in bld.node_deps.get(self.uid(),[]): - if x in dct: - self.run_after.add(dct[x]) - modified=True - if modified: - for tsk in self.run_after: - if not tsk.hasrun: - raise Errors.TaskNotReady('not ready') -if sys.hexversion>0x3000000: - def uid(self): - try: - return self.uid_ - except AttributeError: - m=Utils.md5() - 
up=m.update - up(self.__class__.__name__.encode('iso8859-1','xmlcharrefreplace')) - for x in self.inputs+self.outputs: - up(x.abspath().encode('iso8859-1','xmlcharrefreplace')) - self.uid_=m.digest() - return self.uid_ - uid.__doc__=Task.uid.__doc__ - Task.uid=uid -def is_before(t1,t2): - to_list=Utils.to_list - for k in to_list(t2.ext_in): - if k in to_list(t1.ext_out): - return 1 - if t1.__class__.__name__ in to_list(t2.after): - return 1 - if t2.__class__.__name__ in to_list(t1.before): - return 1 - return 0 -def set_file_constraints(tasks): - ins=Utils.defaultdict(set) - outs=Utils.defaultdict(set) - for x in tasks: - for a in getattr(x,'inputs',[])+getattr(x,'dep_nodes',[]): - ins[id(a)].add(x) - for a in getattr(x,'outputs',[]): - outs[id(a)].add(x) - links=set(ins.keys()).intersection(outs.keys()) - for k in links: - for a in ins[k]: - a.run_after.update(outs[k]) -def set_precedence_constraints(tasks): - cstr_groups=Utils.defaultdict(list) - for x in tasks: - h=x.hash_constraints() - cstr_groups[h].append(x) - keys=list(cstr_groups.keys()) - maxi=len(keys) - for i in range(maxi): - t1=cstr_groups[keys[i]][0] - for j in range(i+1,maxi): - t2=cstr_groups[keys[j]][0] - if is_before(t1,t2): - a=i - b=j - elif is_before(t2,t1): - a=j - b=i - else: - continue - aval=set(cstr_groups[keys[a]]) - for x in cstr_groups[keys[b]]: - x.run_after.update(aval) -def funex(c): - dc={} - exec(c,dc) - return dc['f'] -re_novar=re.compile(r"^(SRC|TGT)\W+.*?$") -reg_act=re.compile(r"(?P\\)|(?P\$\$)|(?P\$\{(?P\w+)(?P.*?)\})",re.M) -def compile_fun_shell(line): - extr=[] - def repl(match): - g=match.group - if g('dollar'):return"$" - elif g('backslash'):return'\\\\' - elif g('subst'):extr.append((g('var'),g('code')));return"%s" - return None - line=reg_act.sub(repl,line)or line - parm=[] - dvars=[] - app=parm.append - for(var,meth)in extr: - if var=='SRC': - if meth:app('tsk.inputs%s'%meth) - else:app('" ".join([a.path_from(cwdx) for a in tsk.inputs])') - elif var=='TGT': - if 
meth:app('tsk.outputs%s'%meth) - else:app('" ".join([a.path_from(cwdx) for a in tsk.outputs])') - elif meth: - if meth.startswith(':'): - m=meth[1:] - if m=='SRC': - m='[a.path_from(cwdx) for a in tsk.inputs]' - elif m=='TGT': - m='[a.path_from(cwdx) for a in tsk.outputs]' - elif re_novar.match(m): - m='[tsk.inputs%s]'%m[3:] - elif re_novar.match(m): - m='[tsk.outputs%s]'%m[3:] - elif m[:3]not in('tsk','gen','bld'): - dvars.extend([var,meth[1:]]) - m='%r'%m - app('" ".join(tsk.colon(%r, %s))'%(var,m)) - else: - app('%s%s'%(var,meth)) - else: - if not var in dvars:dvars.append(var) - app("p('%s')"%var) - if parm:parm="%% (%s) "%(',\n\t\t'.join(parm)) - else:parm='' - c=COMPILE_TEMPLATE_SHELL%(line,parm) - Logs.debug('action: %s'%c.strip().splitlines()) - return(funex(c),dvars) -def compile_fun_noshell(line): - extr=[] - def repl(match): - g=match.group - if g('dollar'):return"$" - elif g('backslash'):return'\\' - elif g('subst'):extr.append((g('var'),g('code')));return"<<|@|>>" - return None - line2=reg_act.sub(repl,line) - params=line2.split('<<|@|>>') - assert(extr) - buf=[] - dvars=[] - app=buf.append - for x in range(len(extr)): - params[x]=params[x].strip() - if params[x]: - app("lst.extend(%r)"%params[x].split()) - (var,meth)=extr[x] - if var=='SRC': - if meth:app('lst.append(tsk.inputs%s)'%meth) - else:app("lst.extend([a.path_from(cwdx) for a in tsk.inputs])") - elif var=='TGT': - if meth:app('lst.append(tsk.outputs%s)'%meth) - else:app("lst.extend([a.path_from(cwdx) for a in tsk.outputs])") - elif meth: - if meth.startswith(':'): - m=meth[1:] - if m=='SRC': - m='[a.path_from(cwdx) for a in tsk.inputs]' - elif m=='TGT': - m='[a.path_from(cwdx) for a in tsk.outputs]' - elif re_novar.match(m): - m='[tsk.inputs%s]'%m[3:] - elif re_novar.match(m): - m='[tsk.outputs%s]'%m[3:] - elif m[:3]not in('tsk','gen','bld'): - dvars.extend([var,m]) - m='%r'%m - app('lst.extend(tsk.colon(%r, %s))'%(var,m)) - else: - app('lst.extend(gen.to_list(%s%s))'%(var,meth)) - else: - 
app('lst.extend(to_list(env[%r]))'%var) - if not var in dvars:dvars.append(var) - if extr: - if params[-1]: - app("lst.extend(%r)"%params[-1].split()) - fun=COMPILE_TEMPLATE_NOSHELL%"\n\t".join(buf) - Logs.debug('action: %s'%fun.strip().splitlines()) - return(funex(fun),dvars) -def compile_fun(line,shell=False): - if isinstance(line,str): - if line.find('<')>0 or line.find('>')>0 or line.find('&&')>0: - shell=True - else: - dvars_lst=[] - funs_lst=[] - for x in line: - if isinstance(x,str): - fun,dvars=compile_fun(x,shell) - dvars_lst+=dvars - funs_lst.append(fun) - else: - funs_lst.append(x) - def composed_fun(task): - for x in funs_lst: - ret=x(task) - if ret: - return ret - return None - return composed_fun,dvars - if shell: - return compile_fun_shell(line) - else: - return compile_fun_noshell(line) -def task_factory(name,func=None,vars=None,color='GREEN',ext_in=[],ext_out=[],before=[],after=[],shell=False,scan=None): - params={'vars':vars or[],'color':color,'name':name,'ext_in':Utils.to_list(ext_in),'ext_out':Utils.to_list(ext_out),'before':Utils.to_list(before),'after':Utils.to_list(after),'shell':shell,'scan':scan,} - if isinstance(func,str)or isinstance(func,tuple): - params['run_str']=func - else: - params['run']=func - cls=type(Task)(name,(Task,),params) - global classes - classes[name]=cls - return cls -def always_run(cls): - old=cls.runnable_status - def always(self): - ret=old(self) - if ret==SKIP_ME: - ret=RUN_ME - return ret - cls.runnable_status=always - return cls -def update_outputs(cls): - old_post_run=cls.post_run - def post_run(self): - old_post_run(self) - for node in self.outputs: - node.sig=node.cache_sig=Utils.h_file(node.abspath()) - self.generator.bld.task_sigs[node.abspath()]=self.uid() - cls.post_run=post_run - old_runnable_status=cls.runnable_status - def runnable_status(self): - status=old_runnable_status(self) - if status!=RUN_ME: - return status - try: - bld=self.generator.bld - prev_sig=bld.task_sigs[self.uid()] - if 
prev_sig==self.signature(): - for x in self.outputs: - if not x.is_child_of(bld.bldnode): - x.sig=Utils.h_file(x.abspath()) - if not x.sig or bld.task_sigs[x.abspath()]!=self.uid(): - return RUN_ME - return SKIP_ME - except OSError: - pass - except IOError: - pass - except KeyError: - pass - except IndexError: - pass - except AttributeError: - pass - return RUN_ME - cls.runnable_status=runnable_status - return cls diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py deleted file mode 100644 index d9f770ce..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/TaskGen.py +++ /dev/null @@ -1,433 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import copy,re,os -from waflib import Task,Utils,Logs,Errors,ConfigSet,Node -feats=Utils.defaultdict(set) -HEADER_EXTS=['.h','.hpp','.hxx','.hh'] -class task_gen(object): - mappings=Utils.ordered_iter_dict() - prec=Utils.defaultdict(list) - def __init__(self,*k,**kw): - self.source='' - self.target='' - self.meths=[] - self.prec=Utils.defaultdict(list) - self.mappings={} - self.features=[] - self.tasks=[] - if not'bld'in kw: - self.env=ConfigSet.ConfigSet() - self.idx=0 - self.path=None - else: - self.bld=kw['bld'] - self.env=self.bld.env.derive() - self.path=self.bld.path - try: - self.idx=self.bld.idx[id(self.path)]=self.bld.idx.get(id(self.path),0)+1 - except AttributeError: - self.bld.idx={} - self.idx=self.bld.idx[id(self.path)]=1 - for key,val in kw.items(): - setattr(self,key,val) - def __str__(self): - return""%(self.name,self.path.abspath()) - def __repr__(self): - lst=[] - for x in self.__dict__.keys(): - if x not in('env','bld','compiled_tasks','tasks'): - lst.append("%s=%s"%(x,repr(getattr(self,x)))) - return"bld(%s) in %s"%(", ".join(lst),self.path.abspath()) - def get_name(self): - try: - return self._name - except 
AttributeError: - if isinstance(self.target,list): - lst=[str(x)for x in self.target] - name=self._name=','.join(lst) - else: - name=self._name=str(self.target) - return name - def set_name(self,name): - self._name=name - name=property(get_name,set_name) - def to_list(self,val): - if isinstance(val,str):return val.split() - else:return val - def post(self): - if getattr(self,'posted',None): - return False - self.posted=True - keys=set(self.meths) - self.features=Utils.to_list(self.features) - for x in self.features+['*']: - st=feats[x] - if not st: - if not x in Task.classes: - Logs.warn('feature %r does not exist - bind at least one method to it'%x) - keys.update(list(st)) - prec={} - prec_tbl=self.prec or task_gen.prec - for x in prec_tbl: - if x in keys: - prec[x]=prec_tbl[x] - tmp=[] - for a in keys: - for x in prec.values(): - if a in x:break - else: - tmp.append(a) - tmp.sort() - out=[] - while tmp: - e=tmp.pop() - if e in keys:out.append(e) - try: - nlst=prec[e] - except KeyError: - pass - else: - del prec[e] - for x in nlst: - for y in prec: - if x in prec[y]: - break - else: - tmp.append(x) - if prec: - raise Errors.WafError('Cycle detected in the method execution %r'%prec) - out.reverse() - self.meths=out - Logs.debug('task_gen: posting %s %d'%(self,id(self))) - for x in out: - try: - v=getattr(self,x) - except AttributeError: - raise Errors.WafError('%r is not a valid task generator method'%x) - Logs.debug('task_gen: -> %s (%d)'%(x,id(self))) - v() - Logs.debug('task_gen: posted %s'%self.name) - return True - def get_hook(self,node): - name=node.name - if self.mappings: - for k in self.mappings: - if name.endswith(k): - return self.mappings[k] - for k in task_gen.mappings: - if name.endswith(k): - return task_gen.mappings[k] - raise Errors.WafError("File %r has no mapping in %r (have you forgotten to load a waf tool?)"%(node,task_gen.mappings.keys())) - def create_task(self,name,src=None,tgt=None,**kw): - 
task=Task.classes[name](env=self.env.derive(),generator=self) - if src: - task.set_inputs(src) - if tgt: - task.set_outputs(tgt) - task.__dict__.update(kw) - self.tasks.append(task) - return task - def clone(self,env): - newobj=self.bld() - for x in self.__dict__: - if x in('env','bld'): - continue - elif x in('path','features'): - setattr(newobj,x,getattr(self,x)) - else: - setattr(newobj,x,copy.copy(getattr(self,x))) - newobj.posted=False - if isinstance(env,str): - newobj.env=self.bld.all_envs[env].derive() - else: - newobj.env=env.derive() - return newobj -def declare_chain(name='',rule=None,reentrant=None,color='BLUE',ext_in=[],ext_out=[],before=[],after=[],decider=None,scan=None,install_path=None,shell=False): - ext_in=Utils.to_list(ext_in) - ext_out=Utils.to_list(ext_out) - if not name: - name=rule - cls=Task.task_factory(name,rule,color=color,ext_in=ext_in,ext_out=ext_out,before=before,after=after,scan=scan,shell=shell) - def x_file(self,node): - ext=decider and decider(self,node)or cls.ext_out - if ext_in: - _ext_in=ext_in[0] - tsk=self.create_task(name,node) - cnt=0 - keys=set(self.mappings.keys())|set(self.__class__.mappings.keys()) - for x in ext: - k=node.change_ext(x,ext_in=_ext_in) - tsk.outputs.append(k) - if reentrant!=None: - if cnt=','exact-version':'==','max-version':'<=',} -SNIP_FUNCTION=''' -int main(int argc, char **argv) { - void (*p)(); - (void)argc; (void)argv; - p=(void(*)())(%s); - return !p; -} -''' -SNIP_TYPE=''' -int main(int argc, char **argv) { - (void)argc; (void)argv; - if ((%(type_name)s *) 0) return 0; - if (sizeof (%(type_name)s)) return 0; - return 1; -} -''' -SNIP_EMPTY_PROGRAM=''' -int main(int argc, char **argv) { - (void)argc; (void)argv; - return 0; -} -''' -SNIP_FIELD=''' -int main(int argc, char **argv) { - char *off; - (void)argc; (void)argv; - off = (char*) &((%(type_name)s*)0)->%(field_name)s; - return (size_t) off < sizeof(%(type_name)s); -} -''' 
-MACRO_TO_DESTOS={'__linux__':'linux','__GNU__':'gnu','__FreeBSD__':'freebsd','__NetBSD__':'netbsd','__OpenBSD__':'openbsd','__sun':'sunos','__hpux':'hpux','__sgi':'irix','_AIX':'aix','__CYGWIN__':'cygwin','__MSYS__':'cygwin','_UWIN':'uwin','_WIN64':'win32','_WIN32':'win32','__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__':'darwin','__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__':'darwin','__QNX__':'qnx','__native_client__':'nacl'} -MACRO_TO_DEST_CPU={'__x86_64__':'x86_64','__amd64__':'x86_64','__i386__':'x86','__ia64__':'ia','__mips__':'mips','__sparc__':'sparc','__alpha__':'alpha','__aarch64__':'aarch64','__thumb__':'thumb','__arm__':'arm','__hppa__':'hppa','__powerpc__':'powerpc','__ppc__':'powerpc','__convex__':'convex','__m68k__':'m68k','__s390x__':'s390x','__s390__':'s390','__sh__':'sh',} -@conf -def parse_flags(self,line,uselib_store,env=None,force_static=False,posix=None): - assert(isinstance(line,str)) - env=env or self.env - if posix is None: - posix=True - if'\\'in line: - posix=('\\ 'in line)or('\\\\'in line) - lex=shlex.shlex(line,posix=posix) - lex.whitespace_split=True - lex.commenters='' - lst=list(lex) - app=env.append_value - appu=env.append_unique - uselib=uselib_store - static=False - while lst: - x=lst.pop(0) - st=x[:2] - ot=x[2:] - if st=='-I'or st=='/I': - if not ot:ot=lst.pop(0) - appu('INCLUDES_'+uselib,[ot]) - elif st=='-i': - tmp=[x,lst.pop(0)] - app('CFLAGS',tmp) - app('CXXFLAGS',tmp) - elif st=='-D'or(env.CXX_NAME=='msvc'and st=='/D'): - if not ot:ot=lst.pop(0) - app('DEFINES_'+uselib,[ot]) - elif st=='-l': - if not ot:ot=lst.pop(0) - prefix=(force_static or static)and'STLIB_'or'LIB_' - appu(prefix+uselib,[ot]) - elif st=='-L': - if not ot:ot=lst.pop(0) - prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' - appu(prefix+uselib,[ot]) - elif x.startswith('/LIBPATH:'): - prefix=(force_static or static)and'STLIBPATH_'or'LIBPATH_' - appu(prefix+uselib,[x.replace('/LIBPATH:','')]) - elif x=='-pthread'or x.startswith('+')or 
x.startswith('-std'): - app('CFLAGS_'+uselib,[x]) - app('CXXFLAGS_'+uselib,[x]) - app('LINKFLAGS_'+uselib,[x]) - elif x=='-framework': - appu('FRAMEWORK_'+uselib,[lst.pop(0)]) - elif x.startswith('-F'): - appu('FRAMEWORKPATH_'+uselib,[x[2:]]) - elif x=='-Wl,-rpath'or x=='-Wl,-R': - app('RPATH_'+uselib,lst.pop(0).lstrip('-Wl,')) - elif x.startswith('-Wl,-R,'): - app('RPATH_'+uselib,x[7:]) - elif x.startswith('-Wl,-R'): - app('RPATH_'+uselib,x[6:]) - elif x.startswith('-Wl,-rpath,'): - app('RPATH_'+uselib,x[11:]) - elif x=='-Wl,-Bstatic'or x=='-Bstatic': - static=True - elif x=='-Wl,-Bdynamic'or x=='-Bdynamic': - static=False - elif x.startswith('-Wl'): - app('LINKFLAGS_'+uselib,[x]) - elif x.startswith('-m')or x.startswith('-f')or x.startswith('-dynamic'): - app('CFLAGS_'+uselib,[x]) - app('CXXFLAGS_'+uselib,[x]) - elif x.startswith('-bundle'): - app('LINKFLAGS_'+uselib,[x]) - elif x.startswith('-undefined')or x.startswith('-Xlinker'): - arg=lst.pop(0) - app('LINKFLAGS_'+uselib,[x,arg]) - elif x.startswith('-arch')or x.startswith('-isysroot'): - tmp=[x,lst.pop(0)] - app('CFLAGS_'+uselib,tmp) - app('CXXFLAGS_'+uselib,tmp) - app('LINKFLAGS_'+uselib,tmp) - elif x.endswith('.a')or x.endswith('.so')or x.endswith('.dylib')or x.endswith('.lib'): - appu('LINKFLAGS_'+uselib,[x]) -@conf -def validate_cfg(self,kw): - if not'path'in kw: - if not self.env.PKGCONFIG: - self.find_program('pkg-config',var='PKGCONFIG') - kw['path']=self.env.PKGCONFIG - if'atleast_pkgconfig_version'in kw: - if not'msg'in kw: - kw['msg']='Checking for pkg-config version >= %r'%kw['atleast_pkgconfig_version'] - return - if not'okmsg'in kw: - kw['okmsg']='yes' - if not'errmsg'in kw: - kw['errmsg']='not found' - if'modversion'in kw: - if not'msg'in kw: - kw['msg']='Checking for %r version'%kw['modversion'] - return - for x in cfg_ver.keys(): - y=x.replace('-','_') - if y in kw: - if not'package'in kw: - raise ValueError('%s requires a package'%x) - if not'msg'in kw: - kw['msg']='Checking for %r %s 
%s'%(kw['package'],cfg_ver[x],kw[y]) - return - if not'define_name'in kw: - pkgname=kw.get('uselib_store',kw['package'].upper()) - kw['define_name']=self.have_define(pkgname) - if not'uselib_store'in kw: - self.undefine(kw['define_name']) - if not'msg'in kw: - kw['msg']='Checking for %r'%(kw['package']or kw['path']) -@conf -def exec_cfg(self,kw): - path=Utils.to_list(kw['path']) - env=self.env.env or None - def define_it(): - pkgname=kw.get('uselib_store',kw['package'].upper()) - if kw.get('global_define'): - self.define(self.have_define(kw['package']),1,False) - else: - self.env.append_unique('DEFINES_%s'%pkgname,"%s=1"%self.have_define(pkgname)) - self.env[self.have_define(pkgname)]=1 - if'atleast_pkgconfig_version'in kw: - cmd=path+['--atleast-pkgconfig-version=%s'%kw['atleast_pkgconfig_version']] - self.cmd_and_log(cmd,env=env) - if not'okmsg'in kw: - kw['okmsg']='yes' - return - for x in cfg_ver: - y=x.replace('-','_') - if y in kw: - self.cmd_and_log(path+['--%s=%s'%(x,kw[y]),kw['package']],env=env) - if not'okmsg'in kw: - kw['okmsg']='yes' - define_it() - break - if'modversion'in kw: - version=self.cmd_and_log(path+['--modversion',kw['modversion']],env=env).strip() - self.define('%s_VERSION'%Utils.quote_define_name(kw.get('uselib_store',kw['modversion'])),version) - return version - lst=[]+path - defi=kw.get('define_variable',None) - if not defi: - defi=self.env.PKG_CONFIG_DEFINES or{} - for key,val in defi.items(): - lst.append('--define-variable=%s=%s'%(key,val)) - static=kw.get('force_static',False) - if'args'in kw: - args=Utils.to_list(kw['args']) - if'--static'in args or'--static-libs'in args: - static=True - lst+=args - lst.extend(Utils.to_list(kw['package'])) - if'variables'in kw: - v_env=kw.get('env',self.env) - uselib=kw.get('uselib_store',kw['package'].upper()) - vars=Utils.to_list(kw['variables']) - for v in vars: - val=self.cmd_and_log(lst+['--variable='+v],env=env).strip() - var='%s_%s'%(uselib,v) - v_env[var]=val - if not'okmsg'in kw: - 
kw['okmsg']='yes' - return - ret=self.cmd_and_log(lst,env=env) - if not'okmsg'in kw: - kw['okmsg']='yes' - define_it() - self.parse_flags(ret,kw.get('uselib_store',kw['package'].upper()),kw.get('env',self.env),force_static=static,posix=kw.get('posix',None)) - return ret -@conf -def check_cfg(self,*k,**kw): - if k: - lst=k[0].split() - kw['package']=lst[0] - kw['args']=' '.join(lst[1:]) - self.validate_cfg(kw) - if'msg'in kw: - self.start_msg(kw['msg'],**kw) - ret=None - try: - ret=self.exec_cfg(kw) - except self.errors.WafError: - if'errmsg'in kw: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - if Logs.verbose>1: - raise - else: - self.fatal('The configuration failed') - else: - if not ret: - ret=True - kw['success']=ret - if'okmsg'in kw: - self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) - return ret -def build_fun(bld): - if bld.kw['compile_filename']: - node=bld.srcnode.make_node(bld.kw['compile_filename']) - node.write(bld.kw['code']) - o=bld(features=bld.kw['features'],source=bld.kw['compile_filename'],target='testprog') - for k,v in bld.kw.items(): - setattr(o,k,v) - if not bld.kw.get('quiet',None): - bld.conf.to_log("==>\n%s\n<=="%bld.kw['code']) -@conf -def validate_c(self,kw): - if not'build_fun'in kw: - kw['build_fun']=build_fun - if not'env'in kw: - kw['env']=self.env.derive() - env=kw['env'] - if not'compiler'in kw and not'features'in kw: - kw['compiler']='c' - if env['CXX_NAME']and Task.classes.get('cxx',None): - kw['compiler']='cxx' - if not self.env['CXX']: - self.fatal('a c++ compiler is required') - else: - if not self.env['CC']: - self.fatal('a c compiler is required') - if not'compile_mode'in kw: - kw['compile_mode']='c' - if'cxx'in Utils.to_list(kw.get('features',[]))or kw.get('compiler','')=='cxx': - kw['compile_mode']='cxx' - if not'type'in kw: - kw['type']='cprogram' - if not'features'in kw: - if not'header_name'in kw or kw.get('link_header_test',True): - kw['features']=[kw['compile_mode'],kw['type']] - else: - kw['features']=[kw['compile_mode']] 
- else: - kw['features']=Utils.to_list(kw['features']) - if not'compile_filename'in kw: - kw['compile_filename']='test.c'+((kw['compile_mode']=='cxx')and'pp'or'') - def to_header(dct): - if'header_name'in dct: - dct=Utils.to_list(dct['header_name']) - return''.join(['#include <%s>\n'%x for x in dct]) - return'' - if'framework_name'in kw: - fwkname=kw['framework_name'] - if not'uselib_store'in kw: - kw['uselib_store']=fwkname.upper() - if not kw.get('no_header',False): - if not'header_name'in kw: - kw['header_name']=[] - fwk='%s/%s.h'%(fwkname,fwkname) - if kw.get('remove_dot_h',None): - fwk=fwk[:-2] - kw['header_name']=Utils.to_list(kw['header_name'])+[fwk] - kw['msg']='Checking for framework %s'%fwkname - kw['framework']=fwkname - if'function_name'in kw: - fu=kw['function_name'] - if not'msg'in kw: - kw['msg']='Checking for function %s'%fu - kw['code']=to_header(kw)+SNIP_FUNCTION%fu - if not'uselib_store'in kw: - kw['uselib_store']=fu.upper() - if not'define_name'in kw: - kw['define_name']=self.have_define(fu) - elif'type_name'in kw: - tu=kw['type_name'] - if not'header_name'in kw: - kw['header_name']='stdint.h' - if'field_name'in kw: - field=kw['field_name'] - kw['code']=to_header(kw)+SNIP_FIELD%{'type_name':tu,'field_name':field} - if not'msg'in kw: - kw['msg']='Checking for field %s in %s'%(field,tu) - if not'define_name'in kw: - kw['define_name']=self.have_define((tu+'_'+field).upper()) - else: - kw['code']=to_header(kw)+SNIP_TYPE%{'type_name':tu} - if not'msg'in kw: - kw['msg']='Checking for type %s'%tu - if not'define_name'in kw: - kw['define_name']=self.have_define(tu.upper()) - elif'header_name'in kw: - if not'msg'in kw: - kw['msg']='Checking for header %s'%kw['header_name'] - l=Utils.to_list(kw['header_name']) - assert len(l)>0,'list of headers in header_name is empty' - kw['code']=to_header(kw)+SNIP_EMPTY_PROGRAM - if not'uselib_store'in kw: - kw['uselib_store']=l[0].upper() - if not'define_name'in kw: - kw['define_name']=self.have_define(l[0]) - 
if'lib'in kw: - if not'msg'in kw: - kw['msg']='Checking for library %s'%kw['lib'] - if not'uselib_store'in kw: - kw['uselib_store']=kw['lib'].upper() - if'stlib'in kw: - if not'msg'in kw: - kw['msg']='Checking for static library %s'%kw['stlib'] - if not'uselib_store'in kw: - kw['uselib_store']=kw['stlib'].upper() - if'fragment'in kw: - kw['code']=kw['fragment'] - if not'msg'in kw: - kw['msg']='Checking for code snippet' - if not'errmsg'in kw: - kw['errmsg']='no' - for(flagsname,flagstype)in(('cxxflags','compiler'),('cflags','compiler'),('linkflags','linker')): - if flagsname in kw: - if not'msg'in kw: - kw['msg']='Checking for %s flags %s'%(flagstype,kw[flagsname]) - if not'errmsg'in kw: - kw['errmsg']='no' - if not'execute'in kw: - kw['execute']=False - if kw['execute']: - kw['features'].append('test_exec') - kw['chmod']=493 - if not'errmsg'in kw: - kw['errmsg']='not found' - if not'okmsg'in kw: - kw['okmsg']='yes' - if not'code'in kw: - kw['code']=SNIP_EMPTY_PROGRAM - if self.env[INCKEYS]: - kw['code']='\n'.join(['#include <%s>'%x for x in self.env[INCKEYS]])+'\n'+kw['code'] - if kw.get('merge_config_header',False)or env.merge_config_header: - kw['code']='%s\n\n%s'%(self.get_config_header(),kw['code']) - env.DEFINES=[] - if not kw.get('success'):kw['success']=None - if'define_name'in kw: - self.undefine(kw['define_name']) - if not'msg'in kw: - self.fatal('missing "msg" in conf.check(...)') -@conf -def post_check(self,*k,**kw): - is_success=0 - if kw['execute']: - if kw['success']is not None: - if kw.get('define_ret',False): - is_success=kw['success'] - else: - is_success=(kw['success']==0) - else: - is_success=(kw['success']==0) - if'define_name'in kw: - if'header_name'in kw or'function_name'in kw or'type_name'in kw or'fragment'in kw: - if kw['execute']and kw.get('define_ret',None)and isinstance(is_success,str): - self.define(kw['define_name'],is_success,quote=kw.get('quote',1)) - else: - self.define_cond(kw['define_name'],is_success) - else: - 
self.define_cond(kw['define_name'],is_success) - if kw.get('global_define',None): - self.env[kw['define_name']]=is_success - if'header_name'in kw: - if kw.get('auto_add_header_name',False): - self.env.append_value(INCKEYS,Utils.to_list(kw['header_name'])) - if is_success and'uselib_store'in kw: - from waflib.Tools import ccroot - _vars=set([]) - for x in kw['features']: - if x in ccroot.USELIB_VARS: - _vars|=ccroot.USELIB_VARS[x] - for k in _vars: - lk=k.lower() - if lk in kw: - val=kw[lk] - if isinstance(val,str): - val=val.rstrip(os.path.sep) - self.env.append_unique(k+'_'+kw['uselib_store'],Utils.to_list(val)) - return is_success -@conf -def check(self,*k,**kw): - self.validate_c(kw) - self.start_msg(kw['msg'],**kw) - ret=None - try: - ret=self.run_build(*k,**kw) - except self.errors.ConfigurationError: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - if Logs.verbose>1: - raise - else: - self.fatal('The configuration failed') - else: - kw['success']=ret - ret=self.post_check(*k,**kw) - if not ret: - self.end_msg(kw['errmsg'],'YELLOW',**kw) - self.fatal('The configuration failed %r'%ret) - else: - self.end_msg(self.ret_msg(kw['okmsg'],kw),**kw) - return ret -class test_exec(Task.Task): - color='PINK' - def run(self): - if getattr(self.generator,'rpath',None): - if getattr(self.generator,'define_ret',False): - self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) - else: - self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()]) - else: - env=self.env.env or{} - env.update(dict(os.environ)) - for var in('LD_LIBRARY_PATH','DYLD_LIBRARY_PATH','PATH'): - env[var]=self.inputs[0].parent.abspath()+os.path.pathsep+env.get(var,'') - if getattr(self.generator,'define_ret',False): - self.generator.bld.retval=self.generator.bld.cmd_and_log([self.inputs[0].abspath()],env=env) - else: - self.generator.bld.retval=self.generator.bld.exec_command([self.inputs[0].abspath()],env=env) -@feature('test_exec') 
-@after_method('apply_link') -def test_exec_fun(self): - self.create_task('test_exec',self.link_task.outputs[0]) -@conf -def check_cxx(self,*k,**kw): - kw['compiler']='cxx' - return self.check(*k,**kw) -@conf -def check_cc(self,*k,**kw): - kw['compiler']='c' - return self.check(*k,**kw) -@conf -def set_define_comment(self,key,comment): - coms=self.env.DEFINE_COMMENTS - if not coms: - coms=self.env.DEFINE_COMMENTS={} - coms[key]=comment or'' -@conf -def get_define_comment(self,key): - coms=self.env.DEFINE_COMMENTS or{} - return coms.get(key,'') -@conf -def define(self,key,val,quote=True,comment=''): - assert key and isinstance(key,str) - if val is True: - val=1 - elif val in(False,None): - val=0 - if isinstance(val,int)or isinstance(val,float): - s='%s=%s' - else: - s=quote and'%s="%s"'or'%s=%s' - app=s%(key,str(val)) - ban=key+'=' - lst=self.env['DEFINES'] - for x in lst: - if x.startswith(ban): - lst[lst.index(x)]=app - break - else: - self.env.append_value('DEFINES',app) - self.env.append_unique(DEFKEYS,key) - self.set_define_comment(key,comment) -@conf -def undefine(self,key,comment=''): - assert key and isinstance(key,str) - ban=key+'=' - lst=[x for x in self.env['DEFINES']if not x.startswith(ban)] - self.env['DEFINES']=lst - self.env.append_unique(DEFKEYS,key) - self.set_define_comment(key,comment) -@conf -def define_cond(self,key,val): - assert key and isinstance(key,str) - if val: - self.define(key,1) - else: - self.undefine(key) -@conf -def is_defined(self,key): - assert key and isinstance(key,str) - ban=key+'=' - for x in self.env['DEFINES']: - if x.startswith(ban): - return True - return False -@conf -def get_define(self,key): - assert key and isinstance(key,str) - ban=key+'=' - for x in self.env['DEFINES']: - if x.startswith(ban): - return x[len(ban):] - return None -@conf -def have_define(self,key): - return(self.env.HAVE_PAT or'HAVE_%s')%Utils.quote_define_name(key) -@conf -def 
write_config_header(self,configfile='',guard='',top=False,defines=True,headers=False,remove=True,define_prefix=''): - if not configfile:configfile=WAF_CONFIG_H - waf_guard=guard or'W_%s_WAF'%Utils.quote_define_name(configfile) - node=top and self.bldnode or self.path.get_bld() - node=node.make_node(configfile) - node.parent.mkdir() - lst=['/* WARNING! All changes made to this file will be lost! */\n'] - lst.append('#ifndef %s\n#define %s\n'%(waf_guard,waf_guard)) - lst.append(self.get_config_header(defines,headers,define_prefix=define_prefix)) - lst.append('\n#endif /* %s */\n'%waf_guard) - node.write('\n'.join(lst)) - self.env.append_unique(Build.CFG_FILES,[node.abspath()]) - if remove: - for key in self.env[DEFKEYS]: - self.undefine(key) - self.env[DEFKEYS]=[] -@conf -def get_config_header(self,defines=True,headers=False,define_prefix=''): - lst=[] - if self.env.WAF_CONFIG_H_PRELUDE: - lst.append(self.env.WAF_CONFIG_H_PRELUDE) - if headers: - for x in self.env[INCKEYS]: - lst.append('#include <%s>'%x) - if defines: - tbl={} - for k in self.env['DEFINES']: - a,_,b=k.partition('=') - tbl[a]=b - for k in self.env[DEFKEYS]: - caption=self.get_define_comment(k) - if caption: - caption=' /* %s */'%caption - try: - txt='#define %s%s %s%s'%(define_prefix,k,tbl[k],caption) - except KeyError: - txt='/* #undef %s%s */%s'%(define_prefix,k,caption) - lst.append(txt) - return"\n".join(lst) -@conf -def cc_add_flags(conf): - conf.add_os_flags('CPPFLAGS',dup=False) - conf.add_os_flags('CFLAGS',dup=False) -@conf -def cxx_add_flags(conf): - conf.add_os_flags('CPPFLAGS',dup=False) - conf.add_os_flags('CXXFLAGS',dup=False) -@conf -def link_add_flags(conf): - conf.add_os_flags('LINKFLAGS',dup=False) - conf.add_os_flags('LDFLAGS',dup=False) -@conf -def cc_load_tools(conf): - if not conf.env.DEST_OS: - conf.env.DEST_OS=Utils.unversioned_sys_platform() - conf.load('c') -@conf -def cxx_load_tools(conf): - if not conf.env.DEST_OS: - conf.env.DEST_OS=Utils.unversioned_sys_platform() - 
conf.load('cxx') -@conf -def get_cc_version(conf,cc,gcc=False,icc=False,clang=False): - cmd=cc+['-dM','-E','-'] - env=conf.env.env or None - try: - out,err=conf.cmd_and_log(cmd,output=0,input='\n'.encode(),env=env) - except Exception: - conf.fatal('Could not determine the compiler version %r'%cmd) - if gcc: - if out.find('__INTEL_COMPILER')>=0: - conf.fatal('The intel compiler pretends to be gcc') - if out.find('__GNUC__')<0 and out.find('__clang__')<0: - conf.fatal('Could not determine the compiler type') - if icc and out.find('__INTEL_COMPILER')<0: - conf.fatal('Not icc/icpc') - if clang and out.find('__clang__')<0: - conf.fatal('Not clang/clang++') - if not clang and out.find('__clang__')>=0: - conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure') - k={} - if icc or gcc or clang: - out=out.splitlines() - for line in out: - lst=shlex.split(line) - if len(lst)>2: - key=lst[1] - val=lst[2] - k[key]=val - def isD(var): - return var in k - if not conf.env.DEST_OS: - conf.env.DEST_OS='' - for i in MACRO_TO_DESTOS: - if isD(i): - conf.env.DEST_OS=MACRO_TO_DESTOS[i] - break - else: - if isD('__APPLE__')and isD('__MACH__'): - conf.env.DEST_OS='darwin' - elif isD('__unix__'): - conf.env.DEST_OS='generic' - if isD('__ELF__'): - conf.env.DEST_BINFMT='elf' - elif isD('__WINNT__')or isD('__CYGWIN__')or isD('_WIN32'): - conf.env.DEST_BINFMT='pe' - conf.env.LIBDIR=conf.env.BINDIR - elif isD('__APPLE__'): - conf.env.DEST_BINFMT='mac-o' - if not conf.env.DEST_BINFMT: - conf.env.DEST_BINFMT=Utils.destos_to_binfmt(conf.env.DEST_OS) - for i in MACRO_TO_DEST_CPU: - if isD(i): - conf.env.DEST_CPU=MACRO_TO_DEST_CPU[i] - break - Logs.debug('ccroot: dest platform: '+' '.join([conf.env[x]or'?'for x in('DEST_OS','DEST_BINFMT','DEST_CPU')])) - if icc: - ver=k['__INTEL_COMPILER'] - conf.env['CC_VERSION']=(ver[:-2],ver[-2],ver[-1]) - else: - if isD('__clang__')and isD('__clang_major__'): - 
conf.env['CC_VERSION']=(k['__clang_major__'],k['__clang_minor__'],k['__clang_patchlevel__']) - else: - conf.env['CC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k.get('__GNUC_PATCHLEVEL__','0')) - return k -@conf -def get_xlc_version(conf,cc): - cmd=cc+['-qversion'] - try: - out,err=conf.cmd_and_log(cmd,output=0) - except Errors.WafError: - conf.fatal('Could not find xlc %r'%cmd) - for v in(r"IBM XL C/C\+\+.* V(?P\d*)\.(?P\d*)",): - version_re=re.compile(v,re.I).search - match=version_re(out or err) - if match: - k=match.groupdict() - conf.env['CC_VERSION']=(k['major'],k['minor']) - break - else: - conf.fatal('Could not determine the XLC version.') -@conf -def get_suncc_version(conf,cc): - cmd=cc+['-V'] - try: - out,err=conf.cmd_and_log(cmd,output=0) - except Errors.WafError as e: - if not(hasattr(e,'returncode')and hasattr(e,'stdout')and hasattr(e,'stderr')): - conf.fatal('Could not find suncc %r'%cmd) - out=e.stdout - err=e.stderr - version=(out or err) - version=version.splitlines()[0] - version_re=re.compile(r'cc:\s+sun\s+(c\+\+|c)\s+(?P\d*)\.(?P\d*)',re.I).search - match=version_re(version) - if match: - k=match.groupdict() - conf.env['CC_VERSION']=(k['major'],k['minor']) - else: - conf.fatal('Could not determine the suncc version.') -@conf -def add_as_needed(self): - if self.env.DEST_BINFMT=='elf'and'gcc'in(self.env.CXX_NAME,self.env.CC_NAME): - self.env.append_unique('LINKFLAGS','-Wl,--as-needed') -class cfgtask(Task.TaskBase): - def display(self): - return'' - def runnable_status(self): - return Task.RUN_ME - def uid(self): - return Utils.SIG_NIL - def run(self): - conf=self.conf - bld=Build.BuildContext(top_dir=conf.srcnode.abspath(),out_dir=conf.bldnode.abspath()) - bld.env=conf.env - bld.init_dirs() - bld.in_msg=1 - bld.logger=self.logger - try: - bld.check(**self.args) - except Exception: - return 1 -@conf -def multicheck(self,*k,**kw): - self.start_msg(kw.get('msg','Executing %d configuration tests'%len(k)),**kw) - class par(object): - def 
__init__(self): - self.keep=False - self.returned_tasks=[] - self.task_sigs={} - self.progress_bar=0 - def total(self): - return len(tasks) - def to_log(self,*k,**kw): - return - bld=par() - tasks=[] - for dct in k: - x=cfgtask(bld=bld) - tasks.append(x) - x.args=dct - x.bld=bld - x.conf=self - x.args=dct - x.logger=Logs.make_mem_logger(str(id(x)),self.logger) - def it(): - yield tasks - while 1: - yield[] - p=Runner.Parallel(bld,Options.options.jobs) - p.biter=it() - p.start() - for x in tasks: - x.logger.memhandler.flush() - if p.error: - for x in p.error: - if getattr(x,'err_msg',None): - self.to_log(x.err_msg) - self.end_msg('fail',color='RED') - raise Errors.WafError('There is an error in the library, read config.log for more information') - for x in tasks: - if x.hasrun!=Task.SUCCESS: - self.end_msg(kw.get('errmsg','no'),color='YELLOW',**kw) - self.fatal(kw.get('fatalmsg',None)or'One of the tests has failed, read config.log for more information') - self.end_msg('ok',**kw) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py deleted file mode 100644 index 8cb4bce1..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_osx.py +++ /dev/null @@ -1,137 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,shutil,platform -from waflib import Task,Utils,Errors -from waflib.TaskGen import taskgen_method,feature,after_method,before_method -app_info=''' - - - - - CFBundlePackageType - APPL - CFBundleGetInfoString - Created by Waf - CFBundleSignature - ???? 
- NOTE - THIS IS A GENERATED FILE, DO NOT MODIFY - CFBundleExecutable - {app_name} - - -''' -@feature('c','cxx') -def set_macosx_deployment_target(self): - if self.env['MACOSX_DEPLOYMENT_TARGET']: - os.environ['MACOSX_DEPLOYMENT_TARGET']=self.env['MACOSX_DEPLOYMENT_TARGET'] - elif'MACOSX_DEPLOYMENT_TARGET'not in os.environ: - if Utils.unversioned_sys_platform()=='darwin': - os.environ['MACOSX_DEPLOYMENT_TARGET']='.'.join(platform.mac_ver()[0].split('.')[:2]) -@taskgen_method -def create_bundle_dirs(self,name,out): - dir=out.parent.find_or_declare(name) - dir.mkdir() - macos=dir.find_or_declare(['Contents','MacOS']) - macos.mkdir() - return dir -def bundle_name_for_output(out): - name=out.name - k=name.rfind('.') - if k>=0: - name=name[:k]+'.app' - else: - name=name+'.app' - return name -@feature('cprogram','cxxprogram') -@after_method('apply_link') -def create_task_macapp(self): - if self.env['MACAPP']or getattr(self,'mac_app',False): - out=self.link_task.outputs[0] - name=bundle_name_for_output(out) - dir=self.create_bundle_dirs(name,out) - n1=dir.find_or_declare(['Contents','MacOS',out.name]) - self.apptask=self.create_task('macapp',self.link_task.outputs,n1) - inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/MacOS/'%name - self.bld.install_files(inst_to,n1,chmod=Utils.O755) - if getattr(self,'mac_files',None): - mac_files_root=getattr(self,'mac_files_root',None) - if isinstance(mac_files_root,str): - mac_files_root=self.path.find_node(mac_files_root) - if not mac_files_root: - self.bld.fatal('Invalid mac_files_root %r'%self.mac_files_root) - res_dir=n1.parent.parent.make_node('Resources') - inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name - for node in self.to_nodes(self.mac_files): - relpath=node.path_from(mac_files_root or node.parent) - self.create_task('macapp',node,res_dir.make_node(relpath)) - self.bld.install_as(os.path.join(inst_to,relpath),node) - if getattr(self,'mac_resources',None): - 
res_dir=n1.parent.parent.make_node('Resources') - inst_to=getattr(self,'install_path','/Applications')+'/%s/Resources'%name - for x in self.to_list(self.mac_resources): - node=self.path.find_node(x) - if not node: - raise Errors.WafError('Missing mac_resource %r in %r'%(x,self)) - parent=node.parent - if os.path.isdir(node.abspath()): - nodes=node.ant_glob('**') - else: - nodes=[node] - for node in nodes: - rel=node.path_from(parent) - self.create_task('macapp',node,res_dir.make_node(rel)) - self.bld.install_as(inst_to+'/%s'%rel,node) - if getattr(self.bld,'is_install',None): - self.install_task.hasrun=Task.SKIP_ME -@feature('cprogram','cxxprogram') -@after_method('apply_link') -def create_task_macplist(self): - if self.env['MACAPP']or getattr(self,'mac_app',False): - out=self.link_task.outputs[0] - name=bundle_name_for_output(out) - dir=self.create_bundle_dirs(name,out) - n1=dir.find_or_declare(['Contents','Info.plist']) - self.plisttask=plisttask=self.create_task('macplist',[],n1) - plisttask.context={'app_name':self.link_task.outputs[0].name,'env':self.env} - plist_ctx=getattr(self,'plist_context',None) - if(plist_ctx): - plisttask.context.update(plist_ctx) - if getattr(self,'mac_plist',False): - node=self.path.find_resource(self.mac_plist) - if node: - plisttask.inputs.append(node) - else: - plisttask.code=self.mac_plist - else: - plisttask.code=app_info - inst_to=getattr(self,'install_path','/Applications')+'/%s/Contents/'%name - self.bld.install_files(inst_to,n1) -@feature('cshlib','cxxshlib') -@before_method('apply_link','propagate_uselib_vars') -def apply_bundle(self): - if self.env['MACBUNDLE']or getattr(self,'mac_bundle',False): - self.env['LINKFLAGS_cshlib']=self.env['LINKFLAGS_cxxshlib']=[] - self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['macbundle_PATTERN'] - use=self.use=self.to_list(getattr(self,'use',[])) - if not'MACBUNDLE'in use: - use.append('MACBUNDLE') -app_dirs=['Contents','Contents/MacOS','Contents/Resources'] -class 
macapp(Task.Task): - color='PINK' - def run(self): - self.outputs[0].parent.mkdir() - shutil.copy2(self.inputs[0].srcpath(),self.outputs[0].abspath()) -class macplist(Task.Task): - color='PINK' - ext_in=['.bin'] - def run(self): - if getattr(self,'code',None): - txt=self.code - else: - txt=self.inputs[0].read() - context=getattr(self,'context',{}) - txt=txt.format(**context) - self.outputs[0].write(txt) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py deleted file mode 100644 index 4a2d12cd..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_preproc.py +++ /dev/null @@ -1,611 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re,string,traceback -from waflib import Logs,Utils,Errors -from waflib.Logs import debug,error -class PreprocError(Errors.WafError): - pass -POPFILE='-' -recursion_limit=150 -go_absolute=False -standard_includes=['/usr/include'] -if Utils.is_win32: - standard_includes=[] -use_trigraphs=0 -strict_quotes=0 -g_optrans={'not':'!','not_eq':'!','and':'&&','and_eq':'&=','or':'||','or_eq':'|=','xor':'^','xor_eq':'^=','bitand':'&','bitor':'|','compl':'~',} -re_lines=re.compile('^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',re.IGNORECASE|re.MULTILINE) -re_mac=re.compile("^[a-zA-Z_]\w*") -re_fun=re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]') -re_pragma_once=re.compile('^\s*once\s*',re.IGNORECASE) -re_nl=re.compile('\\\\\r*\n',re.MULTILINE) -re_cpp=re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',re.DOTALL|re.MULTILINE) -trig_def=[('??'+a,b)for a,b in zip("=-/!'()<>",r'#~\|^[]{}')] -chr_esc={'0':0,'a':7,'b':8,'t':9,'n':10,'f':11,'v':12,'r':13,'\\':92,"'":39} -NUM='i' -OP='O' -IDENT='T' -STR='s' -CHAR='c' -tok_types=[NUM,STR,IDENT,OP] 
-exp_types=[r"""0[xX](?P[a-fA-F0-9]+)(?P[uUlL]*)|L*?'(?P(\\.|[^\\'])+)'|(?P\d+)[Ee](?P[+-]*?\d+)(?P[fFlL]*)|(?P\d*\.\d+)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P\d+\.\d*)([Ee](?P[+-]*?\d+))?(?P[fFlL]*)|(?P0*)(?P\d+)(?P[uUlL]*)""",r'L?"([^"\\]|\\.)*"',r'[a-zA-Z_]\w*',r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',] -re_clexer=re.compile('|'.join(["(?P<%s>%s)"%(name,part)for name,part in zip(tok_types,exp_types)]),re.M) -accepted='a' -ignored='i' -undefined='u' -skipped='s' -def repl(m): - s=m.group(0) - if s.startswith('/'): - return' ' - return s -def filter_comments(filename): - code=Utils.readf(filename) - if use_trigraphs: - for(a,b)in trig_def:code=code.split(a).join(b) - code=re_nl.sub('',code) - code=re_cpp.sub(repl,code) - return[(m.group(2),m.group(3))for m in re.finditer(re_lines,code)] -prec={} -ops=['* / %','+ -','<< >>','< <= >= >','== !=','& | ^','&& ||',','] -for x in range(len(ops)): - syms=ops[x] - for u in syms.split(): - prec[u]=x -def trimquotes(s): - if not s:return'' - s=s.rstrip() - if s[0]=="'"and s[-1]=="'":return s[1:-1] - return s -def reduce_nums(val_1,val_2,val_op): - try:a=0+val_1 - except TypeError:a=int(val_1) - try:b=0+val_2 - except TypeError:b=int(val_2) - d=val_op - if d=='%':c=a%b - elif d=='+':c=a+b - elif d=='-':c=a-b - elif d=='*':c=a*b - elif d=='/':c=a/b - elif d=='^':c=a^b - elif d=='==':c=int(a==b) - elif d=='|'or d=='bitor':c=a|b - elif d=='||'or d=='or':c=int(a or b) - elif d=='&'or d=='bitand':c=a&b - elif d=='&&'or d=='and':c=int(a and b) - elif d=='!='or d=='not_eq':c=int(a!=b) - elif d=='^'or d=='xor':c=int(a^b) - elif d=='<=':c=int(a<=b) - elif d=='<':c=int(a':c=int(a>b) - elif d=='>=':c=int(a>=b) - elif d=='<<':c=a<>':c=a>>b - else:c=0 - return c -def get_num(lst): - if not lst:raise PreprocError("empty list for get_num") - (p,v)=lst[0] - if p==OP: - if v=='(': - count_par=1 - i=1 - while i=prec[v]: - 
num2=reduce_nums(num,num2,v) - return get_term([(NUM,num2)]+lst) - else: - num3,lst=get_num(lst[1:]) - num3=reduce_nums(num2,num3,v2) - return get_term([(NUM,num),(p,v),(NUM,num3)]+lst) - raise PreprocError("cannot reduce %r"%lst) -def reduce_eval(lst): - num,lst=get_term(lst) - return(NUM,num) -def stringize(lst): - lst=[str(v2)for(p2,v2)in lst] - return"".join(lst) -def paste_tokens(t1,t2): - p1=None - if t1[0]==OP and t2[0]==OP: - p1=OP - elif t1[0]==IDENT and(t2[0]==IDENT or t2[0]==NUM): - p1=IDENT - elif t1[0]==NUM and t2[0]==NUM: - p1=NUM - if not p1: - raise PreprocError('tokens do not make a valid paste %r and %r'%(t1,t2)) - return(p1,t1[1]+t2[1]) -def reduce_tokens(lst,defs,ban=[]): - i=0 - while i=len(lst): - raise PreprocError("expected '(' after %r (got nothing)"%v) - (p2,v2)=lst[i] - if p2!=OP or v2!='(': - raise PreprocError("expected '(' after %r"%v) - del lst[i] - one_param=[] - count_paren=0 - while i1: - (p3,v3)=accu[-1] - (p4,v4)=accu[-2] - if v3=='##': - accu.pop() - if v4==','and pt1: - return(v,[[],t[1:]]) - else: - return(v,[[],[('T','')]]) -re_include=re.compile('^\s*(<(?P.*)>|"(?P.*)")') -def extract_include(txt,defs): - m=re_include.search(txt) - if m: - if m.group('a'):return'<',m.group('a') - if m.group('b'):return'"',m.group('b') - toks=tokenize(txt) - reduce_tokens(toks,defs,['waf_include']) - if not toks: - raise PreprocError("could not parse include %s"%txt) - if len(toks)==1: - if toks[0][0]==STR: - return'"',toks[0][1] - else: - if toks[0][1]=='<'and toks[-1][1]=='>': - ret='<',stringize(toks).lstrip('<').rstrip('>') - return ret - raise PreprocError("could not parse include %s."%txt) -def parse_char(txt): - if not txt:raise PreprocError("attempted to parse a null char") - if txt[0]!='\\': - return ord(txt) - c=txt[1] - if c=='x': - if len(txt)==4 and txt[3]in string.hexdigits:return int(txt[2:],16) - return int(txt[2:],16) - elif c.isdigit(): - if c=='0'and len(txt)==2:return 0 - for i in 3,2,1: - if len(txt)>i and 
txt[1:1+i].isdigit(): - return(1+i,int(txt[1:1+i],8)) - else: - try:return chr_esc[c] - except KeyError:raise PreprocError("could not parse char literal '%s'"%txt) -def tokenize(s): - return tokenize_private(s)[:] -@Utils.run_once -def tokenize_private(s): - ret=[] - for match in re_clexer.finditer(s): - m=match.group - for name in tok_types: - v=m(name) - if v: - if name==IDENT: - try: - g_optrans[v]; - name=OP - except KeyError: - if v.lower()=="true": - v=1 - name=NUM - elif v.lower()=="false": - v=0 - name=NUM - elif name==NUM: - if m('oct'):v=int(v,8) - elif m('hex'):v=int(m('hex'),16) - elif m('n0'):v=m('n0') - else: - v=m('char') - if v:v=parse_char(v) - else:v=m('n2')or m('n4') - elif name==OP: - if v=='%:':v='#' - elif v=='%:%:':v='##' - elif name==STR: - v=v[1:-1] - ret.append((name,v)) - break - return ret -@Utils.run_once -def define_name(line): - return re_mac.match(line).group(0) -class c_parser(object): - def __init__(self,nodepaths=None,defines=None): - self.lines=[] - if defines is None: - self.defs={} - else: - self.defs=dict(defines) - self.state=[] - self.count_files=0 - self.currentnode_stack=[] - self.nodepaths=nodepaths or[] - self.nodes=[] - self.names=[] - self.curfile='' - self.ban_includes=set([]) - def cached_find_resource(self,node,filename): - try: - nd=node.ctx.cache_nd - except AttributeError: - nd=node.ctx.cache_nd={} - tup=(node,filename) - try: - return nd[tup] - except KeyError: - ret=node.find_resource(filename) - if ret: - if getattr(ret,'children',None): - ret=None - elif ret.is_child_of(node.ctx.bldnode): - tmp=node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode)) - if tmp and getattr(tmp,'children',None): - ret=None - nd[tup]=ret - return ret - def tryfind(self,filename): - if filename.endswith('.moc'): - self.names.append(filename) - return None - self.curfile=filename - found=self.cached_find_resource(self.currentnode_stack[-1],filename) - for n in self.nodepaths: - if found: - break - 
found=self.cached_find_resource(n,filename) - if found and not found in self.ban_includes: - self.nodes.append(found) - self.addlines(found) - else: - if not filename in self.names: - self.names.append(filename) - return found - def addlines(self,node): - self.currentnode_stack.append(node.parent) - filepath=node.abspath() - self.count_files+=1 - if self.count_files>recursion_limit: - raise PreprocError("recursion limit exceeded") - pc=self.parse_cache - debug('preproc: reading file %r',filepath) - try: - lns=pc[filepath] - except KeyError: - pass - else: - self.lines.extend(lns) - return - try: - lines=filter_comments(filepath) - lines.append((POPFILE,'')) - lines.reverse() - pc[filepath]=lines - self.lines.extend(lines) - except IOError: - raise PreprocError("could not read the file %s"%filepath) - except Exception: - if Logs.verbose>0: - error("parsing %s failed"%filepath) - traceback.print_exc() - def start(self,node,env): - debug('preproc: scanning %s (in %s)',node.name,node.parent.name) - bld=node.ctx - try: - self.parse_cache=bld.parse_cache - except AttributeError: - self.parse_cache=bld.parse_cache={} - self.current_file=node - self.addlines(node) - if env['DEFINES']: - try: - lst=['%s %s'%(x[0],trimquotes('='.join(x[1:])))for x in[y.split('=')for y in env['DEFINES']]] - lst.reverse() - self.lines.extend([('define',x)for x in lst]) - except AttributeError: - pass - while self.lines: - (token,line)=self.lines.pop() - if token==POPFILE: - self.count_files-=1 - self.currentnode_stack.pop() - continue - try: - ve=Logs.verbose - if ve:debug('preproc: line is %s - %s state is %s',token,line,self.state) - state=self.state - if token[:2]=='if': - state.append(undefined) - elif token=='endif': - state.pop() - if token[0]!='e': - if skipped in self.state or ignored in self.state: - continue - if token=='if': - ret=eval_macro(tokenize(line),self.defs) - if ret:state[-1]=accepted - else:state[-1]=ignored - elif token=='ifdef': - m=re_mac.match(line) - if m and 
m.group(0)in self.defs:state[-1]=accepted - else:state[-1]=ignored - elif token=='ifndef': - m=re_mac.match(line) - if m and m.group(0)in self.defs:state[-1]=ignored - else:state[-1]=accepted - elif token=='include'or token=='import': - (kind,inc)=extract_include(line,self.defs) - if ve:debug('preproc: include found %s (%s) ',inc,kind) - if kind=='"'or not strict_quotes: - self.current_file=self.tryfind(inc) - if token=='import': - self.ban_includes.add(self.current_file) - elif token=='elif': - if state[-1]==accepted: - state[-1]=skipped - elif state[-1]==ignored: - if eval_macro(tokenize(line),self.defs): - state[-1]=accepted - elif token=='else': - if state[-1]==accepted:state[-1]=skipped - elif state[-1]==ignored:state[-1]=accepted - elif token=='define': - try: - self.defs[define_name(line)]=line - except Exception: - raise PreprocError("Invalid define line %s"%line) - elif token=='undef': - m=re_mac.match(line) - if m and m.group(0)in self.defs: - self.defs.__delitem__(m.group(0)) - elif token=='pragma': - if re_pragma_once.match(line.lower()): - self.ban_includes.add(self.current_file) - except Exception as e: - if Logs.verbose: - debug('preproc: line parsing failed (%s): %s %s',e,line,Utils.ex_stack()) -def scan(task): - global go_absolute - try: - incn=task.generator.includes_nodes - except AttributeError: - raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": '%task.generator) - if go_absolute: - nodepaths=incn+[task.generator.bld.root.find_dir(x)for x in standard_includes] - else: - nodepaths=[x for x in incn if x.is_child_of(x.ctx.srcnode)or x.is_child_of(x.ctx.bldnode)] - tmp=c_parser(nodepaths) - tmp.start(task.inputs[0],task.env) - if Logs.verbose: - debug('deps: deps for %r: %r; unresolved %r'%(task.inputs,tmp.nodes,tmp.names)) - return(tmp.nodes,tmp.names) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py deleted file 
mode 100644 index 7791f238..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/c_tests.py +++ /dev/null @@ -1,152 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import Task -from waflib.Configure import conf -from waflib.TaskGen import feature,before_method,after_method -LIB_CODE=''' -#ifdef _MSC_VER -#define testEXPORT __declspec(dllexport) -#else -#define testEXPORT -#endif -testEXPORT int lib_func(void) { return 9; } -''' -MAIN_CODE=''' -#ifdef _MSC_VER -#define testEXPORT __declspec(dllimport) -#else -#define testEXPORT -#endif -testEXPORT int lib_func(void); -int main(int argc, char **argv) { - (void)argc; (void)argv; - return !(lib_func() == 9); -} -''' -@feature('link_lib_test') -@before_method('process_source') -def link_lib_test_fun(self): - def write_test_file(task): - task.outputs[0].write(task.generator.code) - rpath=[] - if getattr(self,'add_rpath',False): - rpath=[self.bld.path.get_bld().abspath()] - mode=self.mode - m='%s %s'%(mode,mode) - ex=self.test_exec and'test_exec'or'' - bld=self.bld - bld(rule=write_test_file,target='test.'+mode,code=LIB_CODE) - bld(rule=write_test_file,target='main.'+mode,code=MAIN_CODE) - bld(features='%sshlib'%m,source='test.'+mode,target='test') - bld(features='%sprogram %s'%(m,ex),source='main.'+mode,target='app',use='test',rpath=rpath) -@conf -def check_library(self,mode=None,test_exec=True): - if not mode: - mode='c' - if self.env.CXX: - mode='cxx' - self.check(compile_filename=[],features='link_lib_test',msg='Checking for libraries',mode=mode,test_exec=test_exec,) -INLINE_CODE=''' -typedef int foo_t; -static %s foo_t static_foo () {return 0; } -%s foo_t foo () { - return 0; -} -''' -INLINE_VALUES=['inline','__inline__','__inline'] -@conf -def check_inline(self,**kw): - self.start_msg('Checking for inline') - if not'define_name'in kw: - kw['define_name']='INLINE_MACRO' - if not'features'in kw: - if 
self.env.CXX: - kw['features']=['cxx'] - else: - kw['features']=['c'] - for x in INLINE_VALUES: - kw['fragment']=INLINE_CODE%(x,x) - try: - self.check(**kw) - except self.errors.ConfigurationError: - continue - else: - self.end_msg(x) - if x!='inline': - self.define('inline',x,quote=False) - return x - self.fatal('could not use inline functions') -LARGE_FRAGMENT='''#include -int main(int argc, char **argv) { - (void)argc; (void)argv; - return !(sizeof(off_t) >= 8); -} -''' -@conf -def check_large_file(self,**kw): - if not'define_name'in kw: - kw['define_name']='HAVE_LARGEFILE' - if not'execute'in kw: - kw['execute']=True - if not'features'in kw: - if self.env.CXX: - kw['features']=['cxx','cxxprogram'] - else: - kw['features']=['c','cprogram'] - kw['fragment']=LARGE_FRAGMENT - kw['msg']='Checking for large file support' - ret=True - try: - if self.env.DEST_BINFMT!='pe': - ret=self.check(**kw) - except self.errors.ConfigurationError: - pass - else: - if ret: - return True - kw['msg']='Checking for -D_FILE_OFFSET_BITS=64' - kw['defines']=['_FILE_OFFSET_BITS=64'] - try: - ret=self.check(**kw) - except self.errors.ConfigurationError: - pass - else: - self.define('_FILE_OFFSET_BITS',64) - return ret - self.fatal('There is no support for large files') -ENDIAN_FRAGMENT=''' -short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 }; -short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 }; -int use_ascii (int i) { - return ascii_mm[i] + ascii_ii[i]; -} -short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 }; -short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 }; -int use_ebcdic (int i) { - return ebcdic_mm[i] + ebcdic_ii[i]; -} -extern int foo; -''' -class grep_for_endianness(Task.Task): - color='PINK' - def run(self): - txt=self.inputs[0].read(flags='rb').decode('iso8859-1') - if txt.find('LiTTleEnDian')>-1: - self.generator.tmp.append('little') - elif txt.find('BIGenDianSyS')>-1: - 
self.generator.tmp.append('big') - else: - return-1 -@feature('grep_for_endianness') -@after_method('process_source') -def grep_for_endianness_fun(self): - self.create_task('grep_for_endianness',self.compiled_tasks[0].outputs[0]) -@conf -def check_endianness(self): - tmp=[] - def check_msg(self): - return tmp[0] - self.check(fragment=ENDIAN_FRAGMENT,features='c grep_for_endianness',msg="Checking for endianness",define='ENDIANNESS',tmp=tmp,okmsg=check_msg) - return tmp[0] diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py deleted file mode 100644 index 498a0abe..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ccroot.py +++ /dev/null @@ -1,447 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re -from waflib import Task,Utils,Node,Errors -from waflib.TaskGen import after_method,before_method,feature,taskgen_method,extension -from waflib.Tools import c_aliases,c_preproc,c_config,c_osx,c_tests -from waflib.Configure import conf -SYSTEM_LIB_PATHS=['/usr/lib64','/usr/lib','/usr/local/lib64','/usr/local/lib'] -USELIB_VARS=Utils.defaultdict(set) -USELIB_VARS['c']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CCDEPS','CFLAGS','ARCH']) -USELIB_VARS['cxx']=set(['INCLUDES','FRAMEWORKPATH','DEFINES','CPPFLAGS','CXXDEPS','CXXFLAGS','ARCH']) -USELIB_VARS['d']=set(['INCLUDES','DFLAGS']) -USELIB_VARS['includes']=set(['INCLUDES','FRAMEWORKPATH','ARCH']) -USELIB_VARS['cprogram']=USELIB_VARS['cxxprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) -USELIB_VARS['cshlib']=USELIB_VARS['cxxshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS','FRAMEWORK','FRAMEWORKPATH','ARCH','LDFLAGS']) -USELIB_VARS['cstlib']=USELIB_VARS['cxxstlib']=set(['ARFLAGS','LINKDEPS']) 
-USELIB_VARS['dprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -USELIB_VARS['dshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -USELIB_VARS['dstlib']=set(['ARFLAGS','LINKDEPS']) -USELIB_VARS['asm']=set(['ASFLAGS']) -@taskgen_method -def create_compiled_task(self,name,node): - out='%s.%d.o'%(node.name,self.idx) - task=self.create_task(name,node,node.parent.find_or_declare(out)) - try: - self.compiled_tasks.append(task) - except AttributeError: - self.compiled_tasks=[task] - return task -@taskgen_method -def to_incnodes(self,inlst): - lst=[] - seen=set([]) - for x in self.to_list(inlst): - if x in seen or not x: - continue - seen.add(x) - if isinstance(x,Node.Node): - lst.append(x) - else: - if os.path.isabs(x): - lst.append(self.bld.root.make_node(x)or x) - else: - if x[0]=='#': - p=self.bld.bldnode.make_node(x[1:]) - v=self.bld.srcnode.make_node(x[1:]) - else: - p=self.path.get_bld().make_node(x) - v=self.path.make_node(x) - if p.is_child_of(self.bld.bldnode): - p.mkdir() - lst.append(p) - lst.append(v) - return lst -@feature('c','cxx','d','asm','fc','includes') -@after_method('propagate_uselib_vars','process_source') -def apply_incpaths(self): - lst=self.to_incnodes(self.to_list(getattr(self,'includes',[]))+self.env['INCLUDES']) - self.includes_nodes=lst - self.env['INCPATHS']=[x.abspath()for x in lst] -class link_task(Task.Task): - color='YELLOW' - inst_to=None - chmod=Utils.O755 - def add_target(self,target): - if isinstance(target,str): - pattern=self.env[self.__class__.__name__+'_PATTERN'] - if not pattern: - pattern='%s' - folder,name=os.path.split(target) - if self.__class__.__name__.find('shlib')>0 and getattr(self.generator,'vnum',None): - nums=self.generator.vnum.split('.') - if self.env.DEST_BINFMT=='pe': - name=name+'-'+nums[0] - elif self.env.DEST_OS=='openbsd': - pattern='%s.%s'%(pattern,nums[0]) - if len(nums)>=2: - pattern+='.%s'%nums[1] - if folder: - 
tmp=folder+os.sep+pattern%name - else: - tmp=pattern%name - target=self.generator.path.find_or_declare(tmp) - self.set_outputs(target) -class stlink_task(link_task): - run_str='${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}' - chmod=Utils.O644 -def rm_tgt(cls): - old=cls.run - def wrap(self): - try:os.remove(self.outputs[0].abspath()) - except OSError:pass - return old(self) - setattr(cls,'run',wrap) -rm_tgt(stlink_task) -@feature('c','cxx','d','fc','asm') -@after_method('process_source') -def apply_link(self): - for x in self.features: - if x=='cprogram'and'cxx'in self.features: - x='cxxprogram' - elif x=='cshlib'and'cxx'in self.features: - x='cxxshlib' - if x in Task.classes: - if issubclass(Task.classes[x],link_task): - link=x - break - else: - return - objs=[t.outputs[0]for t in getattr(self,'compiled_tasks',[])] - self.link_task=self.create_task(link,objs) - self.link_task.add_target(self.target) - try: - inst_to=self.install_path - except AttributeError: - inst_to=self.link_task.__class__.inst_to - if inst_to: - self.install_task=self.bld.install_files(inst_to,self.link_task.outputs[:],env=self.env,chmod=self.link_task.chmod,task=self.link_task) -@taskgen_method -def use_rec(self,name,**kw): - if name in self.tmp_use_not or name in self.tmp_use_seen: - return - try: - y=self.bld.get_tgen_by_name(name) - except Errors.WafError: - self.uselib.append(name) - self.tmp_use_not.add(name) - return - self.tmp_use_seen.append(name) - y.post() - y.tmp_use_objects=objects=kw.get('objects',True) - y.tmp_use_stlib=stlib=kw.get('stlib',True) - try: - link_task=y.link_task - except AttributeError: - y.tmp_use_var='' - else: - objects=False - if not isinstance(link_task,stlink_task): - stlib=False - y.tmp_use_var='LIB' - else: - y.tmp_use_var='STLIB' - p=self.tmp_use_prec - for x in self.to_list(getattr(y,'use',[])): - if self.env["STLIB_"+x]: - continue - try: - p[x].append(name) - except KeyError: - p[x]=[name] - self.use_rec(x,objects=objects,stlib=stlib) 
-@feature('c','cxx','d','use','fc') -@before_method('apply_incpaths','propagate_uselib_vars') -@after_method('apply_link','process_source') -def process_use(self): - use_not=self.tmp_use_not=set([]) - self.tmp_use_seen=[] - use_prec=self.tmp_use_prec={} - self.uselib=self.to_list(getattr(self,'uselib',[])) - self.includes=self.to_list(getattr(self,'includes',[])) - names=self.to_list(getattr(self,'use',[])) - for x in names: - self.use_rec(x) - for x in use_not: - if x in use_prec: - del use_prec[x] - out=[] - tmp=[] - for x in self.tmp_use_seen: - for k in use_prec.values(): - if x in k: - break - else: - tmp.append(x) - while tmp: - e=tmp.pop() - out.append(e) - try: - nlst=use_prec[e] - except KeyError: - pass - else: - del use_prec[e] - for x in nlst: - for y in use_prec: - if x in use_prec[y]: - break - else: - tmp.append(x) - if use_prec: - raise Errors.WafError('Cycle detected in the use processing %r'%use_prec) - out.reverse() - link_task=getattr(self,'link_task',None) - for x in out: - y=self.bld.get_tgen_by_name(x) - var=y.tmp_use_var - if var and link_task: - if var=='LIB'or y.tmp_use_stlib or x in names: - self.env.append_value(var,[y.target[y.target.rfind(os.sep)+1:]]) - self.link_task.dep_nodes.extend(y.link_task.outputs) - tmp_path=y.link_task.outputs[0].parent.path_from(self.bld.bldnode) - self.env.append_unique(var+'PATH',[tmp_path]) - else: - if y.tmp_use_objects: - self.add_objects_from_tgen(y) - if getattr(y,'export_includes',None): - self.includes.extend(y.to_incnodes(y.export_includes)) - if getattr(y,'export_defines',None): - self.env.append_value('DEFINES',self.to_list(y.export_defines)) - for x in names: - try: - y=self.bld.get_tgen_by_name(x) - except Errors.WafError: - if not self.env['STLIB_'+x]and not x in self.uselib: - self.uselib.append(x) - else: - for k in self.to_list(getattr(y,'use',[])): - if not self.env['STLIB_'+k]and not k in self.uselib: - self.uselib.append(k) -@taskgen_method -def accept_node_to_link(self,node): - return 
not node.name.endswith('.pdb') -@taskgen_method -def add_objects_from_tgen(self,tg): - try: - link_task=self.link_task - except AttributeError: - pass - else: - for tsk in getattr(tg,'compiled_tasks',[]): - for x in tsk.outputs: - if self.accept_node_to_link(x): - link_task.inputs.append(x) -@taskgen_method -def get_uselib_vars(self): - _vars=set([]) - for x in self.features: - if x in USELIB_VARS: - _vars|=USELIB_VARS[x] - return _vars -@feature('c','cxx','d','fc','javac','cs','uselib','asm') -@after_method('process_use') -def propagate_uselib_vars(self): - _vars=self.get_uselib_vars() - env=self.env - app=env.append_value - feature_uselib=self.features+self.to_list(getattr(self,'uselib',[])) - for var in _vars: - y=var.lower() - val=getattr(self,y,[]) - if val: - app(var,self.to_list(val)) - for x in feature_uselib: - val=env['%s_%s'%(var,x)] - if val: - app(var,val) -@feature('cshlib','cxxshlib','fcshlib') -@after_method('apply_link') -def apply_implib(self): - if not self.env.DEST_BINFMT=='pe': - return - dll=self.link_task.outputs[0] - if isinstance(self.target,Node.Node): - name=self.target.name - else: - name=os.path.split(self.target)[1] - implib=self.env['implib_PATTERN']%name - implib=dll.parent.find_or_declare(implib) - self.env.append_value('LINKFLAGS',self.env['IMPLIB_ST']%implib.bldpath()) - self.link_task.outputs.append(implib) - if getattr(self,'defs',None)and self.env.DEST_BINFMT=='pe': - node=self.path.find_resource(self.defs) - if not node: - raise Errors.WafError('invalid def file %r'%self.defs) - if'msvc'in(self.env.CC_NAME,self.env.CXX_NAME): - self.env.append_value('LINKFLAGS','/def:%s'%node.path_from(self.bld.bldnode)) - self.link_task.dep_nodes.append(node) - else: - self.link_task.inputs.append(node) - if getattr(self,'install_task',None): - try: - inst_to=self.install_path_implib - except AttributeError: - try: - inst_to=self.install_path - except AttributeError: - inst_to='${IMPLIBDIR}' - self.install_task.dest='${BINDIR}' - if not 
self.env.IMPLIBDIR: - self.env.IMPLIBDIR=self.env.LIBDIR - self.implib_install_task=self.bld.install_files(inst_to,implib,env=self.env,chmod=self.link_task.chmod,task=self.link_task) -re_vnum=re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$') -@feature('cshlib','cxxshlib','dshlib','fcshlib','vnum') -@after_method('apply_link','propagate_uselib_vars') -def apply_vnum(self): - if not getattr(self,'vnum','')or os.name!='posix'or self.env.DEST_BINFMT not in('elf','mac-o'): - return - link=self.link_task - if not re_vnum.match(self.vnum): - raise Errors.WafError('Invalid vnum %r for target %r'%(self.vnum,getattr(self,'name',self))) - nums=self.vnum.split('.') - node=link.outputs[0] - cnum=getattr(self,'cnum',str(nums[0])) - cnums=cnum.split('.') - if len(cnums)>len(nums)or nums[0:len(cnums)]!=cnums: - raise Errors.WafError('invalid compatibility version %s'%cnum) - libname=node.name - if libname.endswith('.dylib'): - name3=libname.replace('.dylib','.%s.dylib'%self.vnum) - name2=libname.replace('.dylib','.%s.dylib'%cnum) - else: - name3=libname+'.'+self.vnum - name2=libname+'.'+cnum - if self.env.SONAME_ST: - v=self.env.SONAME_ST%name2 - self.env.append_value('LINKFLAGS',v.split()) - if self.env.DEST_OS!='openbsd': - outs=[node.parent.find_or_declare(name3)] - if name2!=name3: - outs.append(node.parent.find_or_declare(name2)) - self.create_task('vnum',node,outs) - if getattr(self,'install_task',None): - self.install_task.hasrun=Task.SKIP_ME - bld=self.bld - path=self.install_task.dest - if self.env.DEST_OS=='openbsd': - libname=self.link_task.outputs[0].name - t1=bld.install_as('%s%s%s'%(path,os.sep,libname),node,env=self.env,chmod=self.link_task.chmod) - self.vnum_install_task=(t1,) - else: - t1=bld.install_as(path+os.sep+name3,node,env=self.env,chmod=self.link_task.chmod) - t3=bld.symlink_as(path+os.sep+libname,name3) - if name2!=name3: - t2=bld.symlink_as(path+os.sep+name2,name3) - self.vnum_install_task=(t1,t2,t3) - else: - self.vnum_install_task=(t1,t3) - 
if'-dynamiclib'in self.env['LINKFLAGS']: - try: - inst_to=self.install_path - except AttributeError: - inst_to=self.link_task.__class__.inst_to - if inst_to: - p=Utils.subst_vars(inst_to,self.env) - path=os.path.join(p,name2) - self.env.append_value('LINKFLAGS',['-install_name',path]) - self.env.append_value('LINKFLAGS','-Wl,-compatibility_version,%s'%cnum) - self.env.append_value('LINKFLAGS','-Wl,-current_version,%s'%self.vnum) -class vnum(Task.Task): - color='CYAN' - quient=True - ext_in=['.bin'] - def keyword(self): - return'Symlinking' - def run(self): - for x in self.outputs: - path=x.abspath() - try: - os.remove(path) - except OSError: - pass - try: - os.symlink(self.inputs[0].name,path) - except OSError: - return 1 -class fake_shlib(link_task): - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - for x in self.outputs: - x.sig=Utils.h_file(x.abspath()) - return Task.SKIP_ME -class fake_stlib(stlink_task): - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - for x in self.outputs: - x.sig=Utils.h_file(x.abspath()) - return Task.SKIP_ME -@conf -def read_shlib(self,name,paths=[],export_includes=[],export_defines=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='shlib',export_includes=export_includes,export_defines=export_defines) -@conf -def read_stlib(self,name,paths=[],export_includes=[],export_defines=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='stlib',export_includes=export_includes,export_defines=export_defines) -lib_patterns={'shlib':['lib%s.so','%s.so','lib%s.dylib','lib%s.dll','%s.dll'],'stlib':['lib%s.a','%s.a','lib%s.dll','%s.dll','lib%s.lib','%s.lib'],} -@feature('fake_lib') -def process_lib(self): - node=None - names=[x%self.name for x in lib_patterns[self.lib_type]] - for x in self.lib_paths+[self.path]+SYSTEM_LIB_PATHS: - if not isinstance(x,Node.Node): - x=self.bld.root.find_node(x)or 
self.path.find_node(x) - if not x: - continue - for y in names: - node=x.find_node(y) - if node: - node.sig=Utils.h_file(node.abspath()) - break - else: - continue - break - else: - raise Errors.WafError('could not find library %r'%self.name) - self.link_task=self.create_task('fake_%s'%self.lib_type,[],[node]) - self.target=self.name -class fake_o(Task.Task): - def runnable_status(self): - return Task.SKIP_ME -@extension('.o','.obj') -def add_those_o_files(self,node): - tsk=self.create_task('fake_o',[],node) - try: - self.compiled_tasks.append(tsk) - except AttributeError: - self.compiled_tasks=[tsk] -@feature('fake_obj') -@before_method('process_source') -def process_objs(self): - for node in self.to_nodes(self.source): - self.add_those_o_files(node) - self.source=[] -@conf -def read_object(self,obj): - if not isinstance(obj,self.path.__class__): - obj=self.path.find_resource(obj) - return self(features='fake_obj',source=obj,name=obj.name) -@feature('cxxprogram','cprogram') -@after_method('apply_link','process_use') -def set_full_paths_hpux(self): - if self.env.DEST_OS!='hp-ux': - return - base=self.bld.bldnode.abspath() - for var in['LIBPATH','STLIBPATH']: - lst=[] - for x in self.env[var]: - if x.startswith('/'): - lst.append(x) - else: - lst.append(os.path.normpath(os.path.join(base,x))) - self.env[var]=lst diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py deleted file mode 100644 index 9379f5a4..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clang.py +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar,gcc -from waflib.Configure import conf -@conf -def find_clang(conf): - cc=conf.find_program('clang',var='CC') - conf.get_cc_version(cc,clang=True) - conf.env.CC_NAME='clang' -def configure(conf): - conf.find_clang() - conf.find_program(['llvm-ar','ar'],var='AR') - conf.find_ar() - conf.gcc_common_flags() - conf.gcc_modifier_platform() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py deleted file mode 100644 index fc97135e..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/clangxx.py +++ /dev/null @@ -1,20 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar,gxx -from waflib.Configure import conf -@conf -def find_clangxx(conf): - cxx=conf.find_program('clang++',var='CXX') - conf.get_cc_version(cxx,clang=True) - conf.env.CXX_NAME='clang' -def configure(conf): - conf.find_clangxx() - conf.find_program(['llvm-ar','ar'],var='AR') - conf.find_ar() - conf.gxx_common_flags() - conf.gxx_modifier_platform() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py deleted file mode 100644 index 9d8f7f96..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_c.py +++ /dev/null @@ -1,40 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib.Tools import ccroot -from waflib import Utils -from waflib.Logs import debug -c_compiler={'win32':['msvc','gcc','clang'],'cygwin':['gcc'],'darwin':['clang','gcc'],'aix':['xlc','gcc','clang'],'linux':['gcc','clang','icc'],'sunos':['suncc','gcc'],'irix':['gcc','irixcc'],'hpux':['gcc'],'osf1V':['gcc'],'gnu':['gcc','clang'],'java':['gcc','msvc','clang','icc'],'default':['gcc','clang'],} -def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=c_compiler.get(build_platform,c_compiler['default']) - return' '.join(possible_compiler_list) -def configure(conf): - try:test_for_compiler=conf.options.check_c_compiler or default_compilers() - except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_c')") - for compiler in re.split('[ ,]+',test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (C compiler)'%compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - debug('compiler_c: %r'%e) - else: - if conf.env['CC']: - conf.end_msg(conf.env.get_flat('CC')) - conf.env['COMPILER_CC']=compiler - break - conf.end_msg(False) - else: - conf.fatal('could not configure a C compiler!') -def options(opt): - test_for_compiler=default_compilers() - opt.load_special_tools('c_*.py',ban=['c_dumbpreproc.py']) - cc_compiler_opts=opt.add_option_group('Configuration options') - cc_compiler_opts.add_option('--check-c-compiler',default=None,help='list of C compilers to try [%s]'%test_for_compiler,dest="check_c_compiler") - for x in test_for_compiler.split(): - opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py deleted file mode 100644 index 67752294..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_cxx.py +++ 
/dev/null @@ -1,40 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib.Tools import ccroot -from waflib import Utils -from waflib.Logs import debug -cxx_compiler={'win32':['msvc','g++','clang++'],'cygwin':['g++'],'darwin':['clang++','g++'],'aix':['xlc++','g++','clang++'],'linux':['g++','clang++','icpc'],'sunos':['sunc++','g++'],'irix':['g++'],'hpux':['g++'],'osf1V':['g++'],'gnu':['g++','clang++'],'java':['g++','msvc','clang++','icpc'],'default':['g++','clang++']} -def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=cxx_compiler.get(build_platform,cxx_compiler['default']) - return' '.join(possible_compiler_list) -def configure(conf): - try:test_for_compiler=conf.options.check_cxx_compiler or default_compilers() - except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_cxx')") - for compiler in re.split('[ ,]+',test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (C++ compiler)'%compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - debug('compiler_cxx: %r'%e) - else: - if conf.env['CXX']: - conf.end_msg(conf.env.get_flat('CXX')) - conf.env['COMPILER_CXX']=compiler - break - conf.end_msg(False) - else: - conf.fatal('could not configure a C++ compiler!') -def options(opt): - test_for_compiler=default_compilers() - opt.load_special_tools('cxx_*.py') - cxx_compiler_opts=opt.add_option_group('Configuration options') - cxx_compiler_opts.add_option('--check-cxx-compiler',default=None,help='list of C++ compilers to try [%s]'%test_for_compiler,dest="check_cxx_compiler") - for x in test_for_compiler.split(): - opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py deleted file mode 100644 index 
3e13c0dc..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_d.py +++ /dev/null @@ -1,37 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Utils,Logs -d_compiler={'default':['gdc','dmd','ldc2']} -def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=d_compiler.get(build_platform,d_compiler['default']) - return' '.join(possible_compiler_list) -def configure(conf): - try:test_for_compiler=conf.options.check_d_compiler or default_compilers() - except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_d')") - for compiler in re.split('[ ,]+',test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (D compiler)'%compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - Logs.debug('compiler_d: %r'%e) - else: - if conf.env.D: - conf.end_msg(conf.env.get_flat('D')) - conf.env['COMPILER_D']=compiler - break - conf.end_msg(False) - else: - conf.fatal('could not configure a D compiler!') -def options(opt): - test_for_compiler=default_compilers() - d_compiler_opts=opt.add_option_group('Configuration options') - d_compiler_opts.add_option('--check-d-compiler',default=None,help='list of D compilers to try [%s]'%test_for_compiler,dest='check_d_compiler') - for x in test_for_compiler.split(): - opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py deleted file mode 100644 index b31780ab..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/compiler_fc.py +++ /dev/null @@ -1,39 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Utils,Logs -from waflib.Tools import fc -fc_compiler={'win32':['gfortran','ifort'],'darwin':['gfortran','g95','ifort'],'linux':['gfortran','g95','ifort'],'java':['gfortran','g95','ifort'],'default':['gfortran'],'aix':['gfortran']} -def default_compilers(): - build_platform=Utils.unversioned_sys_platform() - possible_compiler_list=fc_compiler.get(build_platform,fc_compiler['default']) - return' '.join(possible_compiler_list) -def configure(conf): - try:test_for_compiler=conf.options.check_fortran_compiler or default_compilers() - except AttributeError:conf.fatal("Add options(opt): opt.load('compiler_fc')") - for compiler in re.split('[ ,]+',test_for_compiler): - conf.env.stash() - conf.start_msg('Checking for %r (Fortran compiler)'%compiler) - try: - conf.load(compiler) - except conf.errors.ConfigurationError as e: - conf.env.revert() - conf.end_msg(False) - Logs.debug('compiler_fortran: %r'%e) - else: - if conf.env['FC']: - conf.end_msg(conf.env.get_flat('FC')) - conf.env.COMPILER_FORTRAN=compiler - break - conf.end_msg(False) - else: - conf.fatal('could not configure a Fortran compiler!') -def options(opt): - test_for_compiler=default_compilers() - opt.load_special_tools('fc_*.py') - fortran_compiler_opts=opt.add_option_group('Configuration options') - fortran_compiler_opts.add_option('--check-fortran-compiler',default=None,help='list of Fortran compiler to try [%s]'%test_for_compiler,dest="check_fortran_compiler") - for x in test_for_compiler.split(): - opt.load('%s'%x) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py deleted file mode 100644 index fd023a40..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cs.py +++ /dev/null @@ -1,132 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import Utils,Task,Options,Errors -from waflib.TaskGen import before_method,after_method,feature -from waflib.Tools import ccroot -from waflib.Configure import conf -import os,tempfile -ccroot.USELIB_VARS['cs']=set(['CSFLAGS','ASSEMBLIES','RESOURCES']) -ccroot.lib_patterns['csshlib']=['%s'] -@feature('cs') -@before_method('process_source') -def apply_cs(self): - cs_nodes=[] - no_nodes=[] - for x in self.to_nodes(self.source): - if x.name.endswith('.cs'): - cs_nodes.append(x) - else: - no_nodes.append(x) - self.source=no_nodes - bintype=getattr(self,'bintype',self.gen.endswith('.dll')and'library'or'exe') - self.cs_task=tsk=self.create_task('mcs',cs_nodes,self.path.find_or_declare(self.gen)) - tsk.env.CSTYPE='/target:%s'%bintype - tsk.env.OUT='/out:%s'%tsk.outputs[0].abspath() - self.env.append_value('CSFLAGS','/platform:%s'%getattr(self,'platform','anycpu')) - inst_to=getattr(self,'install_path',bintype=='exe'and'${BINDIR}'or'${LIBDIR}') - if inst_to: - mod=getattr(self,'chmod',bintype=='exe'and Utils.O755 or Utils.O644) - self.install_task=self.bld.install_files(inst_to,self.cs_task.outputs[:],env=self.env,chmod=mod) -@feature('cs') -@after_method('apply_cs') -def use_cs(self): - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name - for x in names: - try: - y=get(x) - except Errors.WafError: - self.env.append_value('CSFLAGS','/reference:%s'%x) - continue - y.post() - tsk=getattr(y,'cs_task',None)or getattr(y,'link_task',None) - if not tsk: - self.bld.fatal('cs task has no link task for use %r'%self) - self.cs_task.dep_nodes.extend(tsk.outputs) - self.cs_task.set_run_after(tsk) - self.env.append_value('CSFLAGS','/reference:%s'%tsk.outputs[0].abspath()) -@feature('cs') -@after_method('apply_cs','use_cs') -def debug_cs(self): - csdebug=getattr(self,'csdebug',self.env.CSDEBUG) - if not csdebug: - return - node=self.cs_task.outputs[0] - if self.env.CS_NAME=='mono': - 
out=node.parent.find_or_declare(node.name+'.mdb') - else: - out=node.change_ext('.pdb') - self.cs_task.outputs.append(out) - try: - self.install_task.source.append(out) - except AttributeError: - pass - if csdebug=='pdbonly': - val=['/debug+','/debug:pdbonly'] - elif csdebug=='full': - val=['/debug+','/debug:full'] - else: - val=['/debug-'] - self.env.append_value('CSFLAGS',val) -class mcs(Task.Task): - color='YELLOW' - run_str='${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}' - def exec_command(self,cmd,**kw): - bld=self.generator.bld - try: - if not kw.get('cwd',None): - kw['cwd']=bld.cwd - except AttributeError: - bld.cwd=kw['cwd']=bld.variant_dir - try: - tmp=None - if isinstance(cmd,list)and len(' '.join(cmd))>=8192: - program=cmd[0] - cmd=[self.quote_response_command(x)for x in cmd] - (fd,tmp)=tempfile.mkstemp() - os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) - os.close(fd) - cmd=[program,'@'+tmp] - ret=self.generator.bld.exec_command(cmd,**kw) - finally: - if tmp: - try: - os.remove(tmp) - except OSError: - pass - return ret - def quote_response_command(self,flag): - if flag.lower()=='/noconfig': - return'' - if flag.find(' ')>-1: - for x in('/r:','/reference:','/resource:','/lib:','/out:'): - if flag.startswith(x): - flag='%s"%s"'%(x,'","'.join(flag[len(x):].split(','))) - break - else: - flag='"%s"'%flag - return flag -def configure(conf): - csc=getattr(Options.options,'cscbinary',None) - if csc: - conf.env.MCS=csc - conf.find_program(['csc','mcs','gmcs'],var='MCS') - conf.env.ASS_ST='/r:%s' - conf.env.RES_ST='/resource:%s' - conf.env.CS_NAME='csc' - if str(conf.env.MCS).lower().find('mcs')>-1: - conf.env.CS_NAME='mono' -def options(opt): - opt.add_option('--with-csc-binary',type='string',dest='cscbinary') -class fake_csshlib(Task.Task): - color='YELLOW' - inst_to=None - def runnable_status(self): - for x in self.outputs: - x.sig=Utils.h_file(x.abspath()) - return Task.SKIP_ME -@conf -def 
read_csshlib(self,name,paths=[]): - return self(name=name,features='fake_lib',lib_paths=paths,lib_type='csshlib') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py deleted file mode 100644 index 6f039e93..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/cxx.py +++ /dev/null @@ -1,26 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import TaskGen,Task -from waflib.Tools import c_preproc -from waflib.Tools.ccroot import link_task,stlink_task -@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++') -def cxx_hook(self,node): - return self.create_compiled_task('cxx',node) -if not'.c'in TaskGen.task_gen.mappings: - TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp'] -class cxx(Task.Task): - run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}' - vars=['CXXDEPS'] - ext_in=['.h'] - scan=c_preproc.scan -class cxxprogram(link_task): - run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}' - vars=['LINKDEPS'] - ext_out=['.bin'] - inst_to='${BINDIR}' -class cxxshlib(cxxprogram): - inst_to='${LIBDIR}' -class cxxstlib(stlink_task): - pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py deleted file mode 100644 index e8c98f00..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d.py +++ /dev/null @@ -1,54 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import Utils,Task,Errors -from waflib.TaskGen import taskgen_method,feature,extension -from waflib.Tools import d_scan,d_config -from waflib.Tools.ccroot import link_task,stlink_task -class d(Task.Task): - color='GREEN' - run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}' - scan=d_scan.scan -class d_with_header(d): - run_str='${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}' -class d_header(Task.Task): - color='BLUE' - run_str='${D} ${D_HEADER} ${SRC}' -class dprogram(link_task): - run_str='${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}' - inst_to='${BINDIR}' -class dshlib(dprogram): - inst_to='${LIBDIR}' -class dstlib(stlink_task): - pass -@extension('.d','.di','.D') -def d_hook(self,node): - ext=Utils.destos_to_binfmt(self.env.DEST_OS)=='pe'and'obj'or'o' - out='%s.%d.%s'%(node.name,self.idx,ext) - def create_compiled_task(self,name,node): - task=self.create_task(name,node,node.parent.find_or_declare(out)) - try: - self.compiled_tasks.append(task) - except AttributeError: - self.compiled_tasks=[task] - return task - if getattr(self,'generate_headers',None): - tsk=create_compiled_task(self,'d_with_header',node) - tsk.outputs.append(node.change_ext(self.env['DHEADER_ext'])) - else: - tsk=create_compiled_task(self,'d',node) - return tsk -@taskgen_method -def generate_header(self,filename): - try: - self.header_lst.append([filename,self.install_path]) - except AttributeError: - self.header_lst=[[filename,self.install_path]] -@feature('d') -def process_header(self): - for i in getattr(self,'header_lst',[]): - node=self.path.find_resource(i[0]) - if not node: - raise Errors.WafError('file %r 
not found on d obj'%i[0]) - self.create_task('d_header',node,node.change_ext('.di')) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py deleted file mode 100644 index 71b7b6e1..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_config.py +++ /dev/null @@ -1,52 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import Utils -from waflib.Configure import conf -@conf -def d_platform_flags(self): - v=self.env - if not v.DEST_OS: - v.DEST_OS=Utils.unversioned_sys_platform() - binfmt=Utils.destos_to_binfmt(self.env.DEST_OS) - if binfmt=='pe': - v['dprogram_PATTERN']='%s.exe' - v['dshlib_PATTERN']='lib%s.dll' - v['dstlib_PATTERN']='lib%s.a' - elif binfmt=='mac-o': - v['dprogram_PATTERN']='%s' - v['dshlib_PATTERN']='lib%s.dylib' - v['dstlib_PATTERN']='lib%s.a' - else: - v['dprogram_PATTERN']='%s' - v['dshlib_PATTERN']='lib%s.so' - v['dstlib_PATTERN']='lib%s.a' -DLIB=''' -version(D_Version2) { - import std.stdio; - int main() { - writefln("phobos2"); - return 0; - } -} else { - version(Tango) { - import tango.stdc.stdio; - int main() { - printf("tango"); - return 0; - } - } else { - import std.stdio; - int main() { - writefln("phobos1"); - return 0; - } - } -} -''' -@conf -def check_dlibrary(self,execute=True): - ret=self.check_cc(features='d dprogram',fragment=DLIB,compile_filename='test.d',execute=execute,define_ret=True) - if execute: - self.env.DLIBRARY=ret.strip() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py deleted file mode 100644 index 47f91968..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/d_scan.py +++ /dev/null @@ -1,133 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Utils,Logs -def filter_comments(filename): - txt=Utils.readf(filename) - i=0 - buf=[] - max=len(txt) - begin=0 - while i-1: - conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead') - conf.load('ar') - conf.load('d') - conf.common_flags_dmd() - conf.d_platform_flags() - if str(conf.env.D).find('ldc')>-1: - conf.common_flags_ldc() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py deleted file mode 100644 index 421dfa6e..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/errcheck.py +++ /dev/null @@ -1,163 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -typos={'feature':'features','sources':'source','targets':'target','include':'includes','export_include':'export_includes','define':'defines','importpath':'includes','installpath':'install_path','iscopy':'is_copy',} -meths_typos=['__call__','program','shlib','stlib','objects'] -import sys -from waflib import Logs,Build,Node,Task,TaskGen,ConfigSet,Errors,Utils -import waflib.Tools.ccroot -def check_same_targets(self): - mp=Utils.defaultdict(list) - uids={} - def check_task(tsk): - if not isinstance(tsk,Task.Task): - return - for node in tsk.outputs: - mp[node].append(tsk) - try: - uids[tsk.uid()].append(tsk) - except KeyError: - uids[tsk.uid()]=[tsk] - for g in self.groups: - for tg in g: - try: - for tsk in tg.tasks: - check_task(tsk) - except AttributeError: - check_task(tg) - dupe=False - for(k,v)in mp.items(): - if len(v)>1: - dupe=True - msg='* Node %r is created more than once%s. The task generators are:'%(k,Logs.verbose==1 and" (full message on 'waf -v -v')"or"") - Logs.error(msg) - for x in v: - if Logs.verbose>1: - Logs.error(' %d. %r'%(1+v.index(x),x.generator)) - else: - Logs.error(' %d. 
%r in %r'%(1+v.index(x),x.generator.name,getattr(x.generator,'path',None))) - if not dupe: - for(k,v)in uids.items(): - if len(v)>1: - Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid') - for tsk in v: - Logs.error(' - object %r (%r) defined in %r'%(tsk.__class__.__name__,tsk,tsk.generator)) -def check_invalid_constraints(self): - feat=set([]) - for x in list(TaskGen.feats.values()): - feat.union(set(x)) - for(x,y)in TaskGen.task_gen.prec.items(): - feat.add(x) - feat.union(set(y)) - ext=set([]) - for x in TaskGen.task_gen.mappings.values(): - ext.add(x.__name__) - invalid=ext&feat - if invalid: - Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method'%list(invalid)) - for cls in list(Task.classes.values()): - if sys.hexversion>0x3000000 and issubclass(cls,Task.Task)and isinstance(cls.hcode,str): - raise Errors.WafError('Class %r has hcode value %r of type , expecting (use Utils.h_cmd() ?)'%(cls,cls.hcode)) - for x in('before','after'): - for y in Utils.to_list(getattr(cls,x,[])): - if not Task.classes.get(y,None): - Logs.error('Erroneous order constraint %r=%r on task class %r'%(x,y,cls.__name__)) - if getattr(cls,'rule',None): - Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")'%cls.__name__) -def replace(m): - oldcall=getattr(Build.BuildContext,m) - def call(self,*k,**kw): - ret=oldcall(self,*k,**kw) - for x in typos: - if x in kw: - if x=='iscopy'and'subst'in getattr(self,'features',''): - continue - Logs.error('Fix the typo %r -> %r on %r'%(x,typos[x],ret)) - return ret - setattr(Build.BuildContext,m,call) -def enhance_lib(): - for m in meths_typos: - replace(m) - def ant_glob(self,*k,**kw): - if k: - lst=Utils.to_list(k[0]) - for pat in lst: - if'..'in pat.split('/'): - Logs.error("In ant_glob pattern %r: '..' 
means 'two dots', not 'parent directory'"%k[0]) - if kw.get('remove',True): - try: - if self.is_child_of(self.ctx.bldnode)and not kw.get('quiet',False): - Logs.error('Using ant_glob on the build folder (%r) is dangerous (quiet=True to disable this warning)'%self) - except AttributeError: - pass - return self.old_ant_glob(*k,**kw) - Node.Node.old_ant_glob=Node.Node.ant_glob - Node.Node.ant_glob=ant_glob - old=Task.is_before - def is_before(t1,t2): - ret=old(t1,t2) - if ret and old(t2,t1): - Logs.error('Contradictory order constraints in classes %r %r'%(t1,t2)) - return ret - Task.is_before=is_before - def check_err_features(self): - lst=self.to_list(self.features) - if'shlib'in lst: - Logs.error('feature shlib -> cshlib, dshlib or cxxshlib') - for x in('c','cxx','d','fc'): - if not x in lst and lst and lst[0]in[x+y for y in('program','shlib','stlib')]: - Logs.error('%r features is probably missing %r'%(self,x)) - TaskGen.feature('*')(check_err_features) - def check_err_order(self): - if not hasattr(self,'rule')and not'subst'in Utils.to_list(self.features): - for x in('before','after','ext_in','ext_out'): - if hasattr(self,x): - Logs.warn('Erroneous order constraint %r on non-rule based task generator %r'%(x,self)) - else: - for x in('before','after'): - for y in self.to_list(getattr(self,x,[])): - if not Task.classes.get(y,None): - Logs.error('Erroneous order constraint %s=%r on %r (no such class)'%(x,y,self)) - TaskGen.feature('*')(check_err_order) - def check_compile(self): - check_invalid_constraints(self) - try: - ret=self.orig_compile() - finally: - check_same_targets(self) - return ret - Build.BuildContext.orig_compile=Build.BuildContext.compile - Build.BuildContext.compile=check_compile - def use_rec(self,name,**kw): - try: - y=self.bld.get_tgen_by_name(name) - except Errors.WafError: - pass - else: - idx=self.bld.get_group_idx(self) - odx=self.bld.get_group_idx(y) - if odx>idx: - msg="Invalid 'use' across build groups:" - if Logs.verbose>1: - msg+='\n target 
%r\n uses:\n %r'%(self,y) - else: - msg+=" %r uses %r (try 'waf -v -v' for the full error)"%(self.name,name) - raise Errors.WafError(msg) - self.orig_use_rec(name,**kw) - TaskGen.task_gen.orig_use_rec=TaskGen.task_gen.use_rec - TaskGen.task_gen.use_rec=use_rec - def getattri(self,name,default=None): - if name=='append'or name=='add': - raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique') - elif name=='prepend': - raise Errors.WafError('env.prepend does not exist: use env.prepend_value') - if name in self.__slots__: - return object.__getattr__(self,name,default) - else: - return self[name] - ConfigSet.ConfigSet.__getattr__=getattri -def options(opt): - enhance_lib() -def configure(conf): - pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py deleted file mode 100644 index bc9f0b0b..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc.py +++ /dev/null @@ -1,115 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib import Utils,Task,Logs -from waflib.Tools import ccroot,fc_config,fc_scan -from waflib.TaskGen import feature,extension -from waflib.Configure import conf -ccroot.USELIB_VARS['fc']=set(['FCFLAGS','DEFINES','INCLUDES']) -ccroot.USELIB_VARS['fcprogram_test']=ccroot.USELIB_VARS['fcprogram']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -ccroot.USELIB_VARS['fcshlib']=set(['LIB','STLIB','LIBPATH','STLIBPATH','LINKFLAGS','RPATH','LINKDEPS']) -ccroot.USELIB_VARS['fcstlib']=set(['ARFLAGS','LINKDEPS']) -@feature('fcprogram','fcshlib','fcstlib','fcprogram_test') -def dummy(self): - pass -@extension('.f','.f90','.F','.F90','.for','.FOR') -def fc_hook(self,node): - return self.create_compiled_task('fc',node) -@conf -def modfile(conf,name): - return{'lower':name.lower()+'.mod','lower.MOD':name.upper()+'.MOD','UPPER.mod':name.upper()+'.mod','UPPER':name.upper()+'.MOD'}[conf.env.FC_MOD_CAPITALIZATION or'lower'] -def get_fortran_tasks(tsk): - bld=tsk.generator.bld - tasks=bld.get_tasks_group(bld.get_group_idx(tsk.generator)) - return[x for x in tasks if isinstance(x,fc)and not getattr(x,'nomod',None)and not getattr(x,'mod_fortran_done',None)] -class fc(Task.Task): - color='GREEN' - run_str='${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()}' - vars=["FORTRANMODPATHFLAG"] - def scan(self): - tmp=fc_scan.fortran_parser(self.generator.includes_nodes) - tmp.task=self - tmp.start(self.inputs[0]) - if Logs.verbose: - Logs.debug('deps: deps for %r: %r; unresolved %r'%(self.inputs,tmp.nodes,tmp.names)) - return(tmp.nodes,tmp.names) - def runnable_status(self): - if getattr(self,'mod_fortran_done',None): - return super(fc,self).runnable_status() - bld=self.generator.bld - lst=get_fortran_tasks(self) - for tsk in lst: - tsk.mod_fortran_done=True - for tsk in lst: - ret=tsk.runnable_status() - if 
ret==Task.ASK_LATER: - for x in lst: - x.mod_fortran_done=None - return Task.ASK_LATER - ins=Utils.defaultdict(set) - outs=Utils.defaultdict(set) - for tsk in lst: - key=tsk.uid() - for x in bld.raw_deps[key]: - if x.startswith('MOD@'): - name=bld.modfile(x.replace('MOD@','')) - node=bld.srcnode.find_or_declare(name) - if not getattr(node,'sig',None): - node.sig=Utils.SIG_NIL - tsk.set_outputs(node) - outs[id(node)].add(tsk) - for tsk in lst: - key=tsk.uid() - for x in bld.raw_deps[key]: - if x.startswith('USE@'): - name=bld.modfile(x.replace('USE@','')) - node=bld.srcnode.find_resource(name) - if node and node not in tsk.outputs: - if not node in bld.node_deps[key]: - bld.node_deps[key].append(node) - ins[id(node)].add(tsk) - for k in ins.keys(): - for a in ins[k]: - a.run_after.update(outs[k]) - tmp=[] - for t in outs[k]: - tmp.extend(t.outputs) - a.dep_nodes.extend(tmp) - a.dep_nodes.sort(key=lambda x:x.abspath()) - for tsk in lst: - try: - delattr(tsk,'cache_sig') - except AttributeError: - pass - return super(fc,self).runnable_status() -class fcprogram(ccroot.link_task): - color='YELLOW' - run_str='${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}' - inst_to='${BINDIR}' -class fcshlib(fcprogram): - inst_to='${LIBDIR}' -class fcprogram_test(fcprogram): - def runnable_status(self): - ret=super(fcprogram_test,self).runnable_status() - if ret==Task.SKIP_ME: - ret=Task.RUN_ME - return ret - def exec_command(self,cmd,**kw): - bld=self.generator.bld - kw['shell']=isinstance(cmd,str) - kw['stdout']=kw['stderr']=Utils.subprocess.PIPE - kw['cwd']=bld.variant_dir - bld.out=bld.err='' - bld.to_log('command: %s\n'%cmd) - kw['output']=0 - try: - (bld.out,bld.err)=bld.cmd_and_log(cmd,**kw) - except Exception: - return-1 - if bld.out: - bld.to_log("out: %s\n"%bld.out) - if bld.err: - bld.to_log("err: 
%s\n"%bld.err) -class fcstlib(ccroot.stlink_task): - pass diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py deleted file mode 100644 index 58d8ccf5..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_config.py +++ /dev/null @@ -1,286 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re,os,sys,shlex -from waflib.Configure import conf -from waflib.TaskGen import feature,before_method -FC_FRAGMENT=' program main\n end program main\n' -FC_FRAGMENT2=' PROGRAM MAIN\n END\n' -@conf -def fc_flags(conf): - v=conf.env - v['FC_SRC_F']=[] - v['FC_TGT_F']=['-c','-o'] - v['FCINCPATH_ST']='-I%s' - v['FCDEFINES_ST']='-D%s' - if not v['LINK_FC']:v['LINK_FC']=v['FC'] - v['FCLNK_SRC_F']=[] - v['FCLNK_TGT_F']=['-o'] - v['FCFLAGS_fcshlib']=['-fpic'] - v['LINKFLAGS_fcshlib']=['-shared'] - v['fcshlib_PATTERN']='lib%s.so' - v['fcstlib_PATTERN']='lib%s.a' - v['FCLIB_ST']='-l%s' - v['FCLIBPATH_ST']='-L%s' - v['FCSTLIB_ST']='-l%s' - v['FCSTLIBPATH_ST']='-L%s' - v['FCSTLIB_MARKER']='-Wl,-Bstatic' - v['FCSHLIB_MARKER']='-Wl,-Bdynamic' - v['SONAME_ST']='-Wl,-h,%s' -@conf -def fc_add_flags(conf): - conf.add_os_flags('FCFLAGS',dup=False) - conf.add_os_flags('LINKFLAGS',dup=False) - conf.add_os_flags('LDFLAGS',dup=False) -@conf -def check_fortran(self,*k,**kw): - self.check_cc(fragment=FC_FRAGMENT,compile_filename='test.f',features='fc fcprogram',msg='Compiling a simple fortran app') -@conf -def check_fc(self,*k,**kw): - kw['compiler']='fc' - if not'compile_mode'in kw: - kw['compile_mode']='fc' - if not'type'in kw: - kw['type']='fcprogram' - if not'compile_filename'in kw: - kw['compile_filename']='test.f90' - if not'code'in kw: - kw['code']=FC_FRAGMENT - return self.check(*k,**kw) -@conf -def fortran_modifier_darwin(conf): - v=conf.env - v['FCFLAGS_fcshlib']=['-fPIC'] - 
v['LINKFLAGS_fcshlib']=['-dynamiclib'] - v['fcshlib_PATTERN']='lib%s.dylib' - v['FRAMEWORKPATH_ST']='-F%s' - v['FRAMEWORK_ST']='-framework %s' - v['LINKFLAGS_fcstlib']=[] - v['FCSHLIB_MARKER']='' - v['FCSTLIB_MARKER']='' - v['SONAME_ST']='' -@conf -def fortran_modifier_win32(conf): - v=conf.env - v['fcprogram_PATTERN']=v['fcprogram_test_PATTERN']='%s.exe' - v['fcshlib_PATTERN']='%s.dll' - v['implib_PATTERN']='lib%s.dll.a' - v['IMPLIB_ST']='-Wl,--out-implib,%s' - v['FCFLAGS_fcshlib']=[] - v.append_value('FCFLAGS_fcshlib',['-DDLL_EXPORT']) - v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) -@conf -def fortran_modifier_cygwin(conf): - fortran_modifier_win32(conf) - v=conf.env - v['fcshlib_PATTERN']='cyg%s.dll' - v.append_value('LINKFLAGS_fcshlib',['-Wl,--enable-auto-image-base']) - v['FCFLAGS_fcshlib']=[] -@conf -def check_fortran_dummy_main(self,*k,**kw): - if not self.env.CC: - self.fatal('A c compiler is required for check_fortran_dummy_main') - lst=['MAIN__','__MAIN','_MAIN','MAIN_','MAIN'] - lst.extend([m.lower()for m in lst]) - lst.append('') - self.start_msg('Detecting whether we need a dummy main') - for main in lst: - kw['fortran_main']=main - try: - self.check_cc(fragment='int %s() { return 0; }\n'%(main or'test'),features='c fcprogram',mandatory=True) - if not main: - self.env.FC_MAIN=-1 - self.end_msg('no') - else: - self.env.FC_MAIN=main - self.end_msg('yes %s'%main) - break - except self.errors.ConfigurationError: - pass - else: - self.end_msg('not found') - self.fatal('could not detect whether fortran requires a dummy main, see the config.log') -GCC_DRIVER_LINE=re.compile('^Driving:') -POSIX_STATIC_EXT=re.compile('\S+\.a') -POSIX_LIB_FLAGS=re.compile('-l\S+') -@conf -def is_link_verbose(self,txt): - assert isinstance(txt,str) - for line in txt.splitlines(): - if not GCC_DRIVER_LINE.search(line): - if POSIX_STATIC_EXT.search(line)or POSIX_LIB_FLAGS.search(line): - return True - return False -@conf -def check_fortran_verbose_flag(self,*k,**kw): - 
self.start_msg('fortran link verbose flag') - for x in('-v','--verbose','-verbose','-V'): - try: - self.check_cc(features='fc fcprogram_test',fragment=FC_FRAGMENT2,compile_filename='test.f',linkflags=[x],mandatory=True) - except self.errors.ConfigurationError: - pass - else: - if self.is_link_verbose(self.test_bld.err)or self.is_link_verbose(self.test_bld.out): - self.end_msg(x) - break - else: - self.end_msg('failure') - self.fatal('Could not obtain the fortran link verbose flag (see config.log)') - self.env.FC_VERBOSE_FLAG=x - return x -LINKFLAGS_IGNORED=[r'-lang*',r'-lcrt[a-zA-Z0-9\.]*\.o',r'-lc$',r'-lSystem',r'-libmil',r'-LIST:*',r'-LNO:*'] -if os.name=='nt': - LINKFLAGS_IGNORED.extend([r'-lfrt*',r'-luser32',r'-lkernel32',r'-ladvapi32',r'-lmsvcrt',r'-lshell32',r'-lmingw',r'-lmoldname']) -else: - LINKFLAGS_IGNORED.append(r'-lgcc*') -RLINKFLAGS_IGNORED=[re.compile(f)for f in LINKFLAGS_IGNORED] -def _match_ignore(line): - for i in RLINKFLAGS_IGNORED: - if i.match(line): - return True - return False -def parse_fortran_link(lines): - final_flags=[] - for line in lines: - if not GCC_DRIVER_LINE.match(line): - _parse_flink_line(line,final_flags) - return final_flags -SPACE_OPTS=re.compile('^-[LRuYz]$') -NOSPACE_OPTS=re.compile('^-[RL]') -def _parse_flink_token(lexer,token,tmp_flags): - if _match_ignore(token): - pass - elif token.startswith('-lkernel32')and sys.platform=='cygwin': - tmp_flags.append(token) - elif SPACE_OPTS.match(token): - t=lexer.get_token() - if t.startswith('P,'): - t=t[2:] - for opt in t.split(os.pathsep): - tmp_flags.append('-L%s'%opt) - elif NOSPACE_OPTS.match(token): - tmp_flags.append(token) - elif POSIX_LIB_FLAGS.match(token): - tmp_flags.append(token) - else: - pass - t=lexer.get_token() - return t -def _parse_flink_line(line,final_flags): - lexer=shlex.shlex(line,posix=True) - lexer.whitespace_split=True - t=lexer.get_token() - tmp_flags=[] - while t: - t=_parse_flink_token(lexer,t,tmp_flags) - final_flags.extend(tmp_flags) - return 
final_flags -@conf -def check_fortran_clib(self,autoadd=True,*k,**kw): - if not self.env.FC_VERBOSE_FLAG: - self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?') - self.start_msg('Getting fortran runtime link flags') - try: - self.check_cc(fragment=FC_FRAGMENT2,compile_filename='test.f',features='fc fcprogram_test',linkflags=[self.env.FC_VERBOSE_FLAG]) - except Exception: - self.end_msg(False) - if kw.get('mandatory',True): - conf.fatal('Could not find the c library flags') - else: - out=self.test_bld.err - flags=parse_fortran_link(out.splitlines()) - self.end_msg('ok (%s)'%' '.join(flags)) - self.env.LINKFLAGS_CLIB=flags - return flags - return[] -def getoutput(conf,cmd,stdin=False): - from waflib import Errors - if conf.env.env: - env=conf.env.env - else: - env=dict(os.environ) - env['LANG']='C' - input=stdin and'\n'.encode()or None - try: - out,err=conf.cmd_and_log(cmd,env=env,output=0,input=input) - except Errors.WafError as e: - if not(hasattr(e,'stderr')and hasattr(e,'stdout')): - raise e - else: - out=e.stdout - err=e.stderr - except Exception: - conf.fatal('could not determine the compiler version %r'%cmd) - return(out,err) -ROUTINES_CODE="""\ - subroutine foobar() - return - end - subroutine foo_bar() - return - end -""" -MAIN_CODE=""" -void %(dummy_func_nounder)s(void); -void %(dummy_func_under)s(void); -int %(main_func_name)s() { - %(dummy_func_nounder)s(); - %(dummy_func_under)s(); - return 0; -} -""" -@feature('link_main_routines_func') -@before_method('process_source') -def link_main_routines_tg_method(self): - def write_test_file(task): - task.outputs[0].write(task.generator.code) - bld=self.bld - bld(rule=write_test_file,target='main.c',code=MAIN_CODE%self.__dict__) - bld(rule=write_test_file,target='test.f',code=ROUTINES_CODE) - bld(features='fc fcstlib',source='test.f',target='test') - bld(features='c fcprogram',source='main.c',target='app',use='test') -def mangling_schemes(): - for u in('_',''): - for du in('','_'): - 
for c in("lower","upper"): - yield(u,du,c) -def mangle_name(u,du,c,name): - return getattr(name,c)()+u+(name.find('_')!=-1 and du or'') -@conf -def check_fortran_mangling(self,*k,**kw): - if not self.env.CC: - self.fatal('A c compiler is required for link_main_routines') - if not self.env.FC: - self.fatal('A fortran compiler is required for link_main_routines') - if not self.env.FC_MAIN: - self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)') - self.start_msg('Getting fortran mangling scheme') - for(u,du,c)in mangling_schemes(): - try: - self.check_cc(compile_filename=[],features='link_main_routines_func',msg='nomsg',errmsg='nomsg',mandatory=True,dummy_func_nounder=mangle_name(u,du,c,"foobar"),dummy_func_under=mangle_name(u,du,c,"foo_bar"),main_func_name=self.env.FC_MAIN) - except self.errors.ConfigurationError: - pass - else: - self.end_msg("ok ('%s', '%s', '%s-case')"%(u,du,c)) - self.env.FORTRAN_MANGLING=(u,du,c) - break - else: - self.end_msg(False) - self.fatal('mangler not found') - return(u,du,c) -@feature('pyext') -@before_method('propagate_uselib_vars','apply_link') -def set_lib_pat(self): - self.env['fcshlib_PATTERN']=self.env['pyext_PATTERN'] -@conf -def detect_openmp(self): - for x in('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'): - try: - self.check_fc(msg='Checking for OpenMP flag %s'%x,fragment='program main\n call omp_get_num_threads()\nend program main',fcflags=x,linkflags=x,uselib_store='OPENMP') - except self.errors.ConfigurationError: - pass - else: - break - else: - self.fatal('Could not find OpenMP') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py deleted file mode 100644 index c07a22da..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/fc_scan.py +++ /dev/null @@ -1,64 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -INC_REGEX="""(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])""" -USE_REGEX="""(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" -MOD_REGEX="""(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)""" -re_inc=re.compile(INC_REGEX,re.I) -re_use=re.compile(USE_REGEX,re.I) -re_mod=re.compile(MOD_REGEX,re.I) -class fortran_parser(object): - def __init__(self,incpaths): - self.seen=[] - self.nodes=[] - self.names=[] - self.incpaths=incpaths - def find_deps(self,node): - txt=node.read() - incs=[] - uses=[] - mods=[] - for line in txt.splitlines(): - m=re_inc.search(line) - if m: - incs.append(m.group(1)) - m=re_use.search(line) - if m: - uses.append(m.group(1)) - m=re_mod.search(line) - if m: - mods.append(m.group(1)) - return(incs,uses,mods) - def start(self,node): - self.waiting=[node] - while self.waiting: - nd=self.waiting.pop(0) - self.iter(nd) - def iter(self,node): - incs,uses,mods=self.find_deps(node) - for x in incs: - if x in self.seen: - continue - self.seen.append(x) - self.tryfind_header(x) - for x in uses: - name="USE@%s"%x - if not name in self.names: - self.names.append(name) - for x in mods: - name="MOD@%s"%x - if not name in self.names: - self.names.append(name) - def tryfind_header(self,filename): - found=None - for n in self.incpaths: - found=n.find_resource(filename) - if found: - self.nodes.append(found) - self.waiting.append(found) - break - if not found: - if not filename in self.names: - self.names.append(filename) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py deleted file mode 100644 index 7a04074e..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/flex.py +++ /dev/null @@ -1,32 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import waflib.TaskGen,os,re -def decide_ext(self,node): - if'cxx'in self.features: - return['.lex.cc'] - return['.lex.c'] -def flexfun(tsk): - env=tsk.env - bld=tsk.generator.bld - wd=bld.variant_dir - def to_list(xx): - if isinstance(xx,str):return[xx] - return xx - tsk.last_cmd=lst=[] - lst.extend(to_list(env['FLEX'])) - lst.extend(to_list(env['FLEXFLAGS'])) - inputs=[a.path_from(bld.bldnode)for a in tsk.inputs] - if env.FLEX_MSYS: - inputs=[x.replace(os.sep,'/')for x in inputs] - lst.extend(inputs) - lst=[x for x in lst if x] - txt=bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) - tsk.outputs[0].write(txt.replace('\r\n','\n').replace('\r','\n')) -waflib.TaskGen.declare_chain(name='flex',rule=flexfun,ext_in='.l',decider=decide_ext,) -def configure(conf): - conf.find_program('flex',var='FLEX') - conf.env.FLEXFLAGS=['-t'] - if re.search(r"\\msys\\[0-9.]+\\bin\\flex.exe$",conf.env.FLEX[0]): - conf.env.FLEX_MSYS=True diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py deleted file mode 100644 index 6524e1c0..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/g95.py +++ /dev/null @@ -1,54 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Utils -from waflib.Tools import fc,fc_config,fc_scan,ar -from waflib.Configure import conf -@conf -def find_g95(conf): - fc=conf.find_program('g95',var='FC') - conf.get_g95_version(fc) - conf.env.FC_NAME='G95' -@conf -def g95_flags(conf): - v=conf.env - v['FCFLAGS_fcshlib']=['-fPIC'] - v['FORTRANMODFLAG']=['-fmod=',''] - v['FCFLAGS_DEBUG']=['-Werror'] -@conf -def g95_modifier_win32(conf): - fc_config.fortran_modifier_win32(conf) -@conf -def g95_modifier_cygwin(conf): - fc_config.fortran_modifier_cygwin(conf) -@conf -def g95_modifier_darwin(conf): - fc_config.fortran_modifier_darwin(conf) -@conf -def g95_modifier_platform(conf): - dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() - g95_modifier_func=getattr(conf,'g95_modifier_'+dest_os,None) - if g95_modifier_func: - g95_modifier_func() -@conf -def get_g95_version(conf,fc): - version_re=re.compile(r"g95\s*(?P\d*)\.(?P\d*)").search - cmd=fc+['--version'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) - if out: - match=version_re(out) - else: - match=version_re(err) - if not match: - conf.fatal('cannot determine g95 version') - k=match.groupdict() - conf.env['FC_VERSION']=(k['major'],k['minor']) -def configure(conf): - conf.find_g95() - conf.find_ar() - conf.fc_flags() - conf.fc_add_flags() - conf.g95_flags() - conf.g95_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py deleted file mode 100644 index 4817c231..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gas.py +++ /dev/null @@ -1,12 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import waflib.Tools.asm -from waflib.Tools import ar -def configure(conf): - conf.find_program(['gas','gcc'],var='AS') - conf.env.AS_TGT_F=['-c','-o'] - conf.env.ASLNK_TGT_F=['-o'] - conf.find_ar() - conf.load('asm') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py deleted file mode 100644 index a3c7720f..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gcc.py +++ /dev/null @@ -1,102 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_gcc(conf): - cc=conf.find_program(['gcc','cc'],var='CC') - conf.get_cc_version(cc,gcc=True) - conf.env.CC_NAME='gcc' -@conf -def gcc_common_flags(conf): - v=conf.env - v['CC_SRC_F']=[] - v['CC_TGT_F']=['-c','-o'] - if not v['LINK_CC']:v['LINK_CC']=v['CC'] - v['CCLNK_SRC_F']=[] - v['CCLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['RPATH_ST']='-Wl,-rpath,%s' - v['SONAME_ST']='-Wl,-h,%s' - v['SHLIB_MARKER']='-Wl,-Bdynamic' - v['STLIB_MARKER']='-Wl,-Bstatic' - v['cprogram_PATTERN']='%s' - v['CFLAGS_cshlib']=['-fPIC'] - v['LINKFLAGS_cshlib']=['-shared'] - v['cshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cstlib']=['-Wl,-Bstatic'] - v['cstlib_PATTERN']='lib%s.a' - v['LINKFLAGS_MACBUNDLE']=['-bundle','-undefined','dynamic_lookup'] - v['CFLAGS_MACBUNDLE']=['-fPIC'] - v['macbundle_PATTERN']='%s.bundle' -@conf -def gcc_modifier_win32(conf): - v=conf.env - v['cprogram_PATTERN']='%s.exe' - v['cshlib_PATTERN']='%s.dll' - v['implib_PATTERN']='lib%s.dll.a' - v['IMPLIB_ST']='-Wl,--out-implib,%s' - v['CFLAGS_cshlib']=[] - v.append_value('LINKFLAGS',['-Wl,--enable-auto-import']) -@conf -def 
gcc_modifier_cygwin(conf): - gcc_modifier_win32(conf) - v=conf.env - v['cshlib_PATTERN']='cyg%s.dll' - v.append_value('LINKFLAGS_cshlib',['-Wl,--enable-auto-image-base']) - v['CFLAGS_cshlib']=[] -@conf -def gcc_modifier_darwin(conf): - v=conf.env - v['CFLAGS_cshlib']=['-fPIC'] - v['LINKFLAGS_cshlib']=['-dynamiclib'] - v['cshlib_PATTERN']='lib%s.dylib' - v['FRAMEWORKPATH_ST']='-F%s' - v['FRAMEWORK_ST']=['-framework'] - v['ARCH_ST']=['-arch'] - v['LINKFLAGS_cstlib']=[] - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['SONAME_ST']=[] -@conf -def gcc_modifier_aix(conf): - v=conf.env - v['LINKFLAGS_cprogram']=['-Wl,-brtl'] - v['LINKFLAGS_cshlib']=['-shared','-Wl,-brtl,-bexpfull'] - v['SHLIB_MARKER']=[] -@conf -def gcc_modifier_hpux(conf): - v=conf.env - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['CFLAGS_cshlib']=['-fPIC','-DPIC'] - v['cshlib_PATTERN']='lib%s.sl' -@conf -def gcc_modifier_openbsd(conf): - conf.env.SONAME_ST=[] -@conf -def gcc_modifier_osf1V(conf): - v=conf.env - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['SONAME_ST']=[] -@conf -def gcc_modifier_platform(conf): - gcc_modifier_func=getattr(conf,'gcc_modifier_'+conf.env.DEST_OS,None) - if gcc_modifier_func: - gcc_modifier_func() -def configure(conf): - conf.find_gcc() - conf.find_ar() - conf.gcc_common_flags() - conf.gcc_modifier_platform() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py deleted file mode 100644 index acfea4a2..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gdc.py +++ /dev/null @@ -1,35 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ar,d -from waflib.Configure import conf -@conf -def find_gdc(conf): - conf.find_program('gdc',var='D') - out=conf.cmd_and_log(conf.env.D+['--version']) - if out.find("gdc")==-1: - conf.fatal("detected compiler is not gdc") -@conf -def common_flags_gdc(conf): - v=conf.env - v['DFLAGS']=[] - v['D_SRC_F']=['-c'] - v['D_TGT_F']='-o%s' - v['D_LINKER']=v['D'] - v['DLNK_SRC_F']='' - v['DLNK_TGT_F']='-o%s' - v['DINC_ST']='-I%s' - v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' - v['DSTLIB_ST']=v['DSHLIB_ST']='-l%s' - v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L%s' - v['LINKFLAGS_dshlib']=['-shared'] - v['DHEADER_ext']='.di' - v.DFLAGS_d_with_header='-fintfc' - v['D_HDR_F']='-fintfc-file=%s' -def configure(conf): - conf.find_gdc() - conf.load('ar') - conf.load('d') - conf.common_flags_gdc() - conf.d_platform_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py deleted file mode 100644 index a0ea00b6..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gfortran.py +++ /dev/null @@ -1,68 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Utils -from waflib.Tools import fc,fc_config,fc_scan,ar -from waflib.Configure import conf -@conf -def find_gfortran(conf): - fc=conf.find_program(['gfortran','g77'],var='FC') - conf.get_gfortran_version(fc) - conf.env.FC_NAME='GFORTRAN' -@conf -def gfortran_flags(conf): - v=conf.env - v['FCFLAGS_fcshlib']=['-fPIC'] - v['FORTRANMODFLAG']=['-J',''] - v['FCFLAGS_DEBUG']=['-Werror'] -@conf -def gfortran_modifier_win32(conf): - fc_config.fortran_modifier_win32(conf) -@conf -def gfortran_modifier_cygwin(conf): - fc_config.fortran_modifier_cygwin(conf) -@conf -def gfortran_modifier_darwin(conf): - fc_config.fortran_modifier_darwin(conf) -@conf -def gfortran_modifier_platform(conf): - dest_os=conf.env['DEST_OS']or Utils.unversioned_sys_platform() - gfortran_modifier_func=getattr(conf,'gfortran_modifier_'+dest_os,None) - if gfortran_modifier_func: - gfortran_modifier_func() -@conf -def get_gfortran_version(conf,fc): - version_re=re.compile(r"GNU\s*Fortran",re.I).search - cmd=fc+['--version'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) - if out:match=version_re(out) - else:match=version_re(err) - if not match: - conf.fatal('Could not determine the compiler type') - cmd=fc+['-dM','-E','-'] - out,err=fc_config.getoutput(conf,cmd,stdin=True) - if out.find('__GNUC__')<0: - conf.fatal('Could not determine the compiler type') - k={} - out=out.splitlines() - import shlex - for line in out: - lst=shlex.split(line) - if len(lst)>2: - key=lst[1] - val=lst[2] - k[key]=val - def isD(var): - return var in k - def isT(var): - return var in k and k[var]!='0' - conf.env['FC_VERSION']=(k['__GNUC__'],k['__GNUC_MINOR__'],k['__GNUC_PATCHLEVEL__']) -def configure(conf): - conf.find_gfortran() - conf.find_ar() - conf.fc_flags() - conf.fc_add_flags() - conf.gfortran_flags() - conf.gfortran_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py deleted file mode 100644 index 47ee8235..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/glib2.py +++ /dev/null @@ -1,234 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -from waflib import Context,Task,Utils,Options,Errors,Logs -from waflib.TaskGen import taskgen_method,before_method,feature,extension -from waflib.Configure import conf -@taskgen_method -def add_marshal_file(self,filename,prefix): - if not hasattr(self,'marshal_list'): - self.marshal_list=[] - self.meths.append('process_marshal') - self.marshal_list.append((filename,prefix)) -@before_method('process_source') -def process_marshal(self): - for f,prefix in getattr(self,'marshal_list',[]): - node=self.path.find_resource(f) - if not node: - raise Errors.WafError('file not found %r'%f) - h_node=node.change_ext('.h') - c_node=node.change_ext('.c') - task=self.create_task('glib_genmarshal',node,[h_node,c_node]) - task.env.GLIB_GENMARSHAL_PREFIX=prefix - self.source=self.to_nodes(getattr(self,'source',[])) - self.source.append(c_node) -class glib_genmarshal(Task.Task): - def run(self): - bld=self.inputs[0].__class__.ctx - get=self.env.get_flat - cmd1="%s %s --prefix=%s --header > %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[0].abspath()) - ret=bld.exec_command(cmd1) - if ret:return ret - c='''#include "%s"\n'''%self.outputs[0].name - self.outputs[1].write(c) - cmd2="%s %s --prefix=%s --body >> %s"%(get('GLIB_GENMARSHAL'),self.inputs[0].srcpath(),get('GLIB_GENMARSHAL_PREFIX'),self.outputs[1].abspath()) - return bld.exec_command(cmd2) - vars=['GLIB_GENMARSHAL_PREFIX','GLIB_GENMARSHAL'] - color='BLUE' - ext_out=['.h'] -@taskgen_method -def add_enums_from_template(self,source='',target='',template='',comments=''): - if not hasattr(self,'enums_list'): - self.enums_list=[] - 
self.meths.append('process_enums') - self.enums_list.append({'source':source,'target':target,'template':template,'file-head':'','file-prod':'','file-tail':'','enum-prod':'','value-head':'','value-prod':'','value-tail':'','comments':comments}) -@taskgen_method -def add_enums(self,source='',target='',file_head='',file_prod='',file_tail='',enum_prod='',value_head='',value_prod='',value_tail='',comments=''): - if not hasattr(self,'enums_list'): - self.enums_list=[] - self.meths.append('process_enums') - self.enums_list.append({'source':source,'template':'','target':target,'file-head':file_head,'file-prod':file_prod,'file-tail':file_tail,'enum-prod':enum_prod,'value-head':value_head,'value-prod':value_prod,'value-tail':value_tail,'comments':comments}) -@before_method('process_source') -def process_enums(self): - for enum in getattr(self,'enums_list',[]): - task=self.create_task('glib_mkenums') - env=task.env - inputs=[] - source_list=self.to_list(enum['source']) - if not source_list: - raise Errors.WafError('missing source '+str(enum)) - source_list=[self.path.find_resource(k)for k in source_list] - inputs+=source_list - env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] - if not enum['target']: - raise Errors.WafError('missing target '+str(enum)) - tgt_node=self.path.find_or_declare(enum['target']) - if tgt_node.name.endswith('.c'): - self.source.append(tgt_node) - env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() - options=[] - if enum['template']: - template_node=self.path.find_resource(enum['template']) - options.append('--template %s'%(template_node.abspath())) - inputs.append(template_node) - params={'file-head':'--fhead','file-prod':'--fprod','file-tail':'--ftail','enum-prod':'--eprod','value-head':'--vhead','value-prod':'--vprod','value-tail':'--vtail','comments':'--comments'} - for param,option in params.items(): - if enum[param]: - options.append('%s %r'%(option,enum[param])) - env['GLIB_MKENUMS_OPTIONS']=' '.join(options) - task.set_inputs(inputs) - 
task.set_outputs(tgt_node) -class glib_mkenums(Task.Task): - run_str='${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}' - color='PINK' - ext_out=['.h'] -@taskgen_method -def add_settings_schemas(self,filename_list): - if not hasattr(self,'settings_schema_files'): - self.settings_schema_files=[] - if not isinstance(filename_list,list): - filename_list=[filename_list] - self.settings_schema_files.extend(filename_list) -@taskgen_method -def add_settings_enums(self,namespace,filename_list): - if hasattr(self,'settings_enum_namespace'): - raise Errors.WafError("Tried to add gsettings enums to '%s' more than once"%self.name) - self.settings_enum_namespace=namespace - if type(filename_list)!='list': - filename_list=[filename_list] - self.settings_enum_files=filename_list -@feature('glib2') -def process_settings(self): - enums_tgt_node=[] - install_files=[] - settings_schema_files=getattr(self,'settings_schema_files',[]) - if settings_schema_files and not self.env['GLIB_COMPILE_SCHEMAS']: - raise Errors.WafError("Unable to process GSettings schemas - glib-compile-schemas was not found during configure") - if hasattr(self,'settings_enum_files'): - enums_task=self.create_task('glib_mkenums') - source_list=self.settings_enum_files - source_list=[self.path.find_resource(k)for k in source_list] - enums_task.set_inputs(source_list) - enums_task.env['GLIB_MKENUMS_SOURCE']=[k.abspath()for k in source_list] - target=self.settings_enum_namespace+'.enums.xml' - tgt_node=self.path.find_or_declare(target) - enums_task.set_outputs(tgt_node) - enums_task.env['GLIB_MKENUMS_TARGET']=tgt_node.abspath() - enums_tgt_node=[tgt_node] - install_files.append(tgt_node) - options='--comments "" --fhead "" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " " --vtail " " --ftail "" '%(self.settings_enum_namespace) - enums_task.env['GLIB_MKENUMS_OPTIONS']=options - for schema in settings_schema_files: - schema_task=self.create_task('glib_validate_schema') - 
schema_node=self.path.find_resource(schema) - if not schema_node: - raise Errors.WafError("Cannot find the schema file '%s'"%schema) - install_files.append(schema_node) - source_list=enums_tgt_node+[schema_node] - schema_task.set_inputs(source_list) - schema_task.env['GLIB_COMPILE_SCHEMAS_OPTIONS']=[("--schema-file="+k.abspath())for k in source_list] - target_node=schema_node.change_ext('.xml.valid') - schema_task.set_outputs(target_node) - schema_task.env['GLIB_VALIDATE_SCHEMA_OUTPUT']=target_node.abspath() - def compile_schemas_callback(bld): - if not bld.is_install:return - Logs.pprint('YELLOW','Updating GSettings schema cache') - command=Utils.subst_vars("${GLIB_COMPILE_SCHEMAS} ${GSETTINGSSCHEMADIR}",bld.env) - self.bld.exec_command(command) - if self.bld.is_install: - if not self.env['GSETTINGSSCHEMADIR']: - raise Errors.WafError('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)') - if install_files: - self.bld.install_files(self.env['GSETTINGSSCHEMADIR'],install_files) - if not hasattr(self.bld,'_compile_schemas_registered'): - self.bld.add_post_fun(compile_schemas_callback) - self.bld._compile_schemas_registered=True -class glib_validate_schema(Task.Task): - run_str='rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}' - color='PINK' -@extension('.gresource.xml') -def process_gresource_source(self,node): - if not self.env['GLIB_COMPILE_RESOURCES']: - raise Errors.WafError("Unable to process GResource file - glib-compile-resources was not found during configure") - if'gresource'in self.features: - return - h_node=node.change_ext('_xml.h') - c_node=node.change_ext('_xml.c') - self.create_task('glib_gresource_source',node,[h_node,c_node]) - self.source.append(c_node) -@feature('gresource') -def process_gresource_bundle(self): - for i in self.to_list(self.source): - node=self.path.find_resource(i) - 
task=self.create_task('glib_gresource_bundle',node,node.change_ext('')) - inst_to=getattr(self,'install_path',None) - if inst_to: - self.bld.install_files(inst_to,task.outputs) -class glib_gresource_base(Task.Task): - color='BLUE' - base_cmd='${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}' - def scan(self): - bld=self.generator.bld - kw={} - try: - if not kw.get('cwd',None): - kw['cwd']=bld.cwd - except AttributeError: - bld.cwd=kw['cwd']=bld.variant_dir - kw['quiet']=Context.BOTH - cmd=Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s'%(self.inputs[0].parent.srcpath(),self.inputs[0].bld_dir(),self.inputs[0].bldpath()),self.env) - output=bld.cmd_and_log(cmd,**kw) - nodes=[] - names=[] - for dep in output.splitlines(): - if dep: - node=bld.bldnode.find_node(dep) - if node: - nodes.append(node) - else: - names.append(dep) - return(nodes,names) -class glib_gresource_source(glib_gresource_base): - vars=['GLIB_COMPILE_RESOURCES'] - fun_h=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[0].abspath()} --generate-header ${SRC}') - fun_c=Task.compile_fun_shell(glib_gresource_base.base_cmd+' --target=${TGT[1].abspath()} --generate-source ${SRC}') - ext_out=['.h'] - def run(self): - return self.fun_h[0](self)or self.fun_c[0](self) -class glib_gresource_bundle(glib_gresource_base): - run_str=glib_gresource_base.base_cmd+' --target=${TGT} ${SRC}' - shell=True -@conf -def find_glib_genmarshal(conf): - conf.find_program('glib-genmarshal',var='GLIB_GENMARSHAL') -@conf -def find_glib_mkenums(conf): - if not conf.env.PERL: - conf.find_program('perl',var='PERL') - conf.find_program('glib-mkenums',interpreter='PERL',var='GLIB_MKENUMS') -@conf -def find_glib_compile_schemas(conf): - conf.find_program('glib-compile-schemas',var='GLIB_COMPILE_SCHEMAS') - def getstr(varname): - return getattr(Options.options,varname,getattr(conf.env,varname,'')) - 
gsettingsschemadir=getstr('GSETTINGSSCHEMADIR') - if not gsettingsschemadir: - datadir=getstr('DATADIR') - if not datadir: - prefix=conf.env['PREFIX'] - datadir=os.path.join(prefix,'share') - gsettingsschemadir=os.path.join(datadir,'glib-2.0','schemas') - conf.env['GSETTINGSSCHEMADIR']=gsettingsschemadir -@conf -def find_glib_compile_resources(conf): - conf.find_program('glib-compile-resources',var='GLIB_COMPILE_RESOURCES') -def configure(conf): - conf.find_glib_genmarshal() - conf.find_glib_mkenums() - conf.find_glib_compile_schemas(mandatory=False) - conf.find_glib_compile_resources(mandatory=False) -def options(opt): - gr=opt.add_option_group('Installation directories') - gr.add_option('--gsettingsschemadir',help='GSettings schema location [DATADIR/glib-2.0/schemas]',default='',dest='GSETTINGSSCHEMADIR') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py deleted file mode 100644 index 21a62885..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/gnu_dirs.py +++ /dev/null @@ -1,66 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re -from waflib import Utils,Options,Context -gnuopts=''' -bindir, user commands, ${EXEC_PREFIX}/bin -sbindir, system binaries, ${EXEC_PREFIX}/sbin -libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec -sysconfdir, host-specific configuration, ${PREFIX}/etc -sharedstatedir, architecture-independent variable data, ${PREFIX}/com -localstatedir, variable data, ${PREFIX}/var -libdir, object code libraries, ${EXEC_PREFIX}/lib%s -includedir, header files, ${PREFIX}/include -oldincludedir, header files for non-GCC compilers, /usr/include -datarootdir, architecture-independent data root, ${PREFIX}/share -datadir, architecture-independent data, ${DATAROOTDIR} -infodir, GNU "info" documentation, ${DATAROOTDIR}/info -localedir, locale-dependent data, ${DATAROOTDIR}/locale -mandir, manual pages, ${DATAROOTDIR}/man -docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE} -htmldir, HTML documentation, ${DOCDIR} -dvidir, DVI documentation, ${DOCDIR} -pdfdir, PDF documentation, ${DOCDIR} -psdir, PostScript documentation, ${DOCDIR} -'''%Utils.lib64() -_options=[x.split(', ')for x in gnuopts.splitlines()if x] -def configure(conf): - def get_param(varname,default): - return getattr(Options.options,varname,'')or default - env=conf.env - env.LIBDIR=env.BINDIR=[] - env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX) - env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE - complete=False - iter=0 - while not complete and iter\d*)\.(?P\d*)",re.I).search - if Utils.is_win32: - cmd=fc - else: - cmd=fc+['-logo'] - out,err=fc_config.getoutput(conf,cmd,stdin=False) - match=version_re(out)or version_re(err) - if not match: - conf.fatal('cannot determine ifort version.') - k=match.groupdict() - conf.env['FC_VERSION']=(k['major'],k['minor']) -def configure(conf): - conf.find_ifort() - conf.find_program('xiar',var='AR') - conf.find_ar() - conf.fc_flags() - conf.fc_add_flags() - 
conf.ifort_modifier_platform() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py deleted file mode 100644 index c751e266..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/intltool.py +++ /dev/null @@ -1,97 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re -from waflib import Context,Task,Utils,Logs -import waflib.Tools.ccroot -from waflib.TaskGen import feature,before_method,taskgen_method -from waflib.Logs import error -from waflib.Configure import conf -_style_flags={'ba':'-b','desktop':'-d','keys':'-k','quoted':'--quoted-style','quotedxml':'--quotedxml-style','rfc822deb':'-r','schemas':'-s','xml':'-x',} -@taskgen_method -def ensure_localedir(self): - if not self.env.LOCALEDIR: - if self.env.DATAROOTDIR: - self.env.LOCALEDIR=os.path.join(self.env.DATAROOTDIR,'locale') - else: - self.env.LOCALEDIR=os.path.join(self.env.PREFIX,'share','locale') -@before_method('process_source') -@feature('intltool_in') -def apply_intltool_in_f(self): - try:self.meths.remove('process_source') - except ValueError:pass - self.ensure_localedir() - podir=getattr(self,'podir','.') - podirnode=self.path.find_dir(podir) - if not podirnode: - error("could not find the podir %r"%podir) - return - cache=getattr(self,'intlcache','.intlcache') - self.env.INTLCACHE=[os.path.join(str(self.path.get_bld()),podir,cache)] - self.env.INTLPODIR=podirnode.bldpath() - self.env.append_value('INTLFLAGS',getattr(self,'flags',self.env.INTLFLAGS_DEFAULT)) - if'-c'in self.env.INTLFLAGS: - self.bld.fatal('Redundant -c flag in intltool task %r'%self) - style=getattr(self,'style',None) - if style: - try: - style_flag=_style_flags[style] - except KeyError: - self.bld.fatal('intltool_in style "%s" is not valid'%style) - self.env.append_unique('INTLFLAGS',[style_flag]) - for i in 
self.to_list(self.source): - node=self.path.find_resource(i) - task=self.create_task('intltool',node,node.change_ext('')) - inst=getattr(self,'install_path',None) - if inst: - self.bld.install_files(inst,task.outputs) -@feature('intltool_po') -def apply_intltool_po(self): - try:self.meths.remove('process_source') - except ValueError:pass - self.ensure_localedir() - appname=getattr(self,'appname',getattr(Context.g_module,Context.APPNAME,'set_your_app_name')) - podir=getattr(self,'podir','.') - inst=getattr(self,'install_path','${LOCALEDIR}') - linguas=self.path.find_node(os.path.join(podir,'LINGUAS')) - if linguas: - file=open(linguas.abspath()) - langs=[] - for line in file.readlines(): - if not line.startswith('#'): - langs+=line.split() - file.close() - re_linguas=re.compile('[-a-zA-Z_@.]+') - for lang in langs: - if re_linguas.match(lang): - node=self.path.find_resource(os.path.join(podir,re_linguas.match(lang).group()+'.po')) - task=self.create_task('po',node,node.change_ext('.mo')) - if inst: - filename=task.outputs[0].name - (langname,ext)=os.path.splitext(filename) - inst_file=inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+appname+'.mo' - self.bld.install_as(inst_file,task.outputs[0],chmod=getattr(self,'chmod',Utils.O644),env=task.env) - else: - Logs.pprint('RED',"Error no LINGUAS file found in po directory") -class po(Task.Task): - run_str='${MSGFMT} -o ${TGT} ${SRC}' - color='BLUE' -class intltool(Task.Task): - run_str='${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}' - color='BLUE' -@conf -def find_msgfmt(conf): - conf.find_program('msgfmt',var='MSGFMT') -@conf -def find_intltool_merge(conf): - if not conf.env.PERL: - conf.find_program('perl',var='PERL') - conf.env.INTLCACHE_ST='--cache=%s' - conf.env.INTLFLAGS_DEFAULT=['-q','-u'] - conf.find_program('intltool-merge',interpreter='PERL',var='INTLTOOL') -def configure(conf): - conf.find_msgfmt() - conf.find_intltool_merge() - if conf.env.CC or conf.env.CXX: - 
conf.check(header_name='locale.h') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py deleted file mode 100644 index 74a36cf9..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/irixcc.py +++ /dev/null @@ -1,45 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_irixcc(conf): - v=conf.env - cc=None - if v['CC']:cc=v['CC'] - elif'CC'in conf.environ:cc=conf.environ['CC'] - if not cc:cc=conf.find_program('cc',var='CC') - if not cc:conf.fatal('irixcc was not found') - try: - conf.cmd_and_log(cc+['-version']) - except Exception: - conf.fatal('%r -version could not be executed'%cc) - v['CC']=cc - v['CC_NAME']='irix' -@conf -def irixcc_common_flags(conf): - v=conf.env - v['CC_SRC_F']='' - v['CC_TGT_F']=['-c','-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - if not v['LINK_CC']:v['LINK_CC']=v['CC'] - v['CCLNK_SRC_F']='' - v['CCLNK_TGT_F']=['-o'] - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['cprogram_PATTERN']='%s' - v['cshlib_PATTERN']='lib%s.so' - v['cstlib_PATTERN']='lib%s.a' -def configure(conf): - conf.find_irixcc() - conf.find_cpp() - conf.find_ar() - conf.irixcc_common_flags() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py deleted file mode 100644 index 9009a7ae..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/javaw.py +++ /dev/null @@ -1,305 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,tempfile,shutil -from waflib import Task,Utils,Errors,Node,Logs -from waflib.Configure import conf -from waflib.TaskGen import feature,before_method,after_method -from waflib.Tools import ccroot -ccroot.USELIB_VARS['javac']=set(['CLASSPATH','JAVACFLAGS']) -SOURCE_RE='**/*.java' -JAR_RE='**/*' -class_check_source=''' -public class Test { - public static void main(String[] argv) { - Class lib; - if (argv.length < 1) { - System.err.println("Missing argument"); - System.exit(77); - } - try { - lib = Class.forName(argv[0]); - } catch (ClassNotFoundException e) { - System.err.println("ClassNotFoundException"); - System.exit(1); - } - lib = null; - System.exit(0); - } -} -''' -@feature('javac') -@before_method('process_source') -def apply_java(self): - Utils.def_attrs(self,jarname='',classpath='',sourcepath='.',srcdir='.',jar_mf_attributes={},jar_mf_classpath=[]) - outdir=getattr(self,'outdir',None) - if outdir: - if not isinstance(outdir,Node.Node): - outdir=self.path.get_bld().make_node(self.outdir) - else: - outdir=self.path.get_bld() - outdir.mkdir() - self.outdir=outdir - self.env['OUTDIR']=outdir.abspath() - self.javac_task=tsk=self.create_task('javac') - tmp=[] - srcdir=getattr(self,'srcdir','') - if isinstance(srcdir,Node.Node): - srcdir=[srcdir] - for x in Utils.to_list(srcdir): - if isinstance(x,Node.Node): - y=x - else: - y=self.path.find_dir(x) - if not y: - self.bld.fatal('Could not find the folder %s from %s'%(x,self.path)) - tmp.append(y) - tsk.srcdir=tmp - if getattr(self,'compat',None): - tsk.env.append_value('JAVACFLAGS',['-source',self.compat]) - if hasattr(self,'sourcepath'): - fold=[isinstance(x,Node.Node)and x or self.path.find_dir(x)for x in self.to_list(self.sourcepath)] - names=os.pathsep.join([x.srcpath()for x in fold]) - else: - names=[x.srcpath()for x in tsk.srcdir] - if names: - tsk.env.append_value('JAVACFLAGS',['-sourcepath',names]) -@feature('javac') 
-@after_method('apply_java') -def use_javac_files(self): - lst=[] - self.uselib=self.to_list(getattr(self,'uselib',[])) - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name - for x in names: - try: - y=get(x) - except Exception: - self.uselib.append(x) - else: - y.post() - lst.append(y.jar_task.outputs[0].abspath()) - self.javac_task.set_run_after(y.jar_task) - if lst: - self.env.append_value('CLASSPATH',lst) -@feature('javac') -@after_method('apply_java','propagate_uselib_vars','use_javac_files') -def set_classpath(self): - self.env.append_value('CLASSPATH',getattr(self,'classpath',[])) - for x in self.tasks: - x.env.CLASSPATH=os.pathsep.join(self.env.CLASSPATH)+os.pathsep -@feature('jar') -@after_method('apply_java','use_javac_files') -@before_method('process_source') -def jar_files(self): - destfile=getattr(self,'destfile','test.jar') - jaropts=getattr(self,'jaropts',[]) - manifest=getattr(self,'manifest',None) - basedir=getattr(self,'basedir',None) - if basedir: - if not isinstance(self.basedir,Node.Node): - basedir=self.path.get_bld().make_node(basedir) - else: - basedir=self.path.get_bld() - if not basedir: - self.bld.fatal('Could not find the basedir %r for %r'%(self.basedir,self)) - self.jar_task=tsk=self.create_task('jar_create') - if manifest: - jarcreate=getattr(self,'jarcreate','cfm') - node=self.path.find_node(manifest) - tsk.dep_nodes.append(node) - jaropts.insert(0,node.abspath()) - else: - jarcreate=getattr(self,'jarcreate','cf') - if not isinstance(destfile,Node.Node): - destfile=self.path.find_or_declare(destfile) - if not destfile: - self.bld.fatal('invalid destfile %r for %r'%(destfile,self)) - tsk.set_outputs(destfile) - tsk.basedir=basedir - jaropts.append('-C') - jaropts.append(basedir.bldpath()) - jaropts.append('.') - tsk.env['JAROPTS']=jaropts - tsk.env['JARCREATE']=jarcreate - if getattr(self,'javac_task',None): - tsk.set_run_after(self.javac_task) -@feature('jar') -@after_method('jar_files') -def 
use_jar_files(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - names=self.to_list(getattr(self,'use',[])) - get=self.bld.get_tgen_by_name - for x in names: - try: - y=get(x) - except Exception: - self.uselib.append(x) - else: - y.post() - self.jar_task.run_after.update(y.tasks) -class jar_create(Task.Task): - color='GREEN' - run_str='${JAR} ${JARCREATE} ${TGT} ${JAROPTS}' - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - if not self.inputs: - global JAR_RE - try: - self.inputs=[x for x in self.basedir.ant_glob(JAR_RE,remove=False)if id(x)!=id(self.outputs[0])] - except Exception: - raise Errors.WafError('Could not find the basedir %r for %r'%(self.basedir,self)) - return super(jar_create,self).runnable_status() -class javac(Task.Task): - color='BLUE' - vars=['CLASSPATH','JAVACFLAGS','JAVAC','OUTDIR'] - def runnable_status(self): - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - if not self.inputs: - global SOURCE_RE - self.inputs=[] - for x in self.srcdir: - self.inputs.extend(x.ant_glob(SOURCE_RE,remove=False)) - return super(javac,self).runnable_status() - def run(self): - env=self.env - gen=self.generator - bld=gen.bld - wd=bld.bldnode.abspath() - def to_list(xx): - if isinstance(xx,str):return[xx] - return xx - cmd=[] - cmd.extend(to_list(env['JAVAC'])) - cmd.extend(['-classpath']) - cmd.extend(to_list(env['CLASSPATH'])) - cmd.extend(['-d']) - cmd.extend(to_list(env['OUTDIR'])) - cmd.extend(to_list(env['JAVACFLAGS'])) - files=[a.path_from(bld.bldnode)for a in self.inputs] - tmp=None - try: - if len(str(files))+len(str(cmd))>8192: - (fd,tmp)=tempfile.mkstemp(dir=bld.bldnode.abspath()) - try: - os.write(fd,'\n'.join(files).encode()) - finally: - if tmp: - os.close(fd) - if Logs.verbose: - Logs.debug('runner: %r'%(cmd+files)) - cmd.append('@'+tmp) - else: - cmd+=files - ret=self.exec_command(cmd,cwd=wd,env=env.env or None) - finally: - if tmp: - os.remove(tmp) - return ret - def 
post_run(self): - for n in self.generator.outdir.ant_glob('**/*.class'): - n.sig=Utils.h_file(n.abspath()) - self.generator.bld.task_sigs[self.uid()]=self.cache_sig -@feature('javadoc') -@after_method('process_rule') -def create_javadoc(self): - tsk=self.create_task('javadoc') - tsk.classpath=getattr(self,'classpath',[]) - self.javadoc_package=Utils.to_list(self.javadoc_package) - if not isinstance(self.javadoc_output,Node.Node): - self.javadoc_output=self.bld.path.find_or_declare(self.javadoc_output) -class javadoc(Task.Task): - color='BLUE' - def __str__(self): - return'%s: %s -> %s\n'%(self.__class__.__name__,self.generator.srcdir,self.generator.javadoc_output) - def run(self): - env=self.env - bld=self.generator.bld - wd=bld.bldnode.abspath() - srcpath=self.generator.path.abspath()+os.sep+self.generator.srcdir - srcpath+=os.pathsep - srcpath+=self.generator.path.get_bld().abspath()+os.sep+self.generator.srcdir - classpath=env.CLASSPATH - classpath+=os.pathsep - classpath+=os.pathsep.join(self.classpath) - classpath="".join(classpath) - self.last_cmd=lst=[] - lst.extend(Utils.to_list(env['JAVADOC'])) - lst.extend(['-d',self.generator.javadoc_output.abspath()]) - lst.extend(['-sourcepath',srcpath]) - lst.extend(['-classpath',classpath]) - lst.extend(['-subpackages']) - lst.extend(self.generator.javadoc_package) - lst=[x for x in lst if x] - self.generator.bld.cmd_and_log(lst,cwd=wd,env=env.env or None,quiet=0) - def post_run(self): - nodes=self.generator.javadoc_output.ant_glob('**') - for x in nodes: - x.sig=Utils.h_file(x.abspath()) - self.generator.bld.task_sigs[self.uid()]=self.cache_sig -def configure(self): - java_path=self.environ['PATH'].split(os.pathsep) - v=self.env - if'JAVA_HOME'in self.environ: - java_path=[os.path.join(self.environ['JAVA_HOME'],'bin')]+java_path - self.env['JAVA_HOME']=[self.environ['JAVA_HOME']] - for x in'javac java jar javadoc'.split(): - self.find_program(x,var=x.upper(),path_list=java_path) - if'CLASSPATH'in self.environ: - 
v['CLASSPATH']=self.environ['CLASSPATH'] - if not v['JAR']:self.fatal('jar is required for making java packages') - if not v['JAVAC']:self.fatal('javac is required for compiling java classes') - v['JARCREATE']='cf' - v['JAVACFLAGS']=[] -@conf -def check_java_class(self,classname,with_classpath=None): - javatestdir='.waf-javatest' - classpath=javatestdir - if self.env['CLASSPATH']: - classpath+=os.pathsep+self.env['CLASSPATH'] - if isinstance(with_classpath,str): - classpath+=os.pathsep+with_classpath - shutil.rmtree(javatestdir,True) - os.mkdir(javatestdir) - Utils.writef(os.path.join(javatestdir,'Test.java'),class_check_source) - self.exec_command(self.env['JAVAC']+[os.path.join(javatestdir,'Test.java')],shell=False) - cmd=self.env['JAVA']+['-cp',classpath,'Test',classname] - self.to_log("%s\n"%str(cmd)) - found=self.exec_command(cmd,shell=False) - self.msg('Checking for java class %s'%classname,not found) - shutil.rmtree(javatestdir,True) - return found -@conf -def check_jni_headers(conf): - if not conf.env.CC_NAME and not conf.env.CXX_NAME: - conf.fatal('load a compiler first (gcc, g++, ..)') - if not conf.env.JAVA_HOME: - conf.fatal('set JAVA_HOME in the system environment') - javaHome=conf.env['JAVA_HOME'][0] - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/include') - if dir is None: - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]+'/../Headers') - if dir is None: - conf.fatal('JAVA_HOME does not seem to be set properly') - f=dir.ant_glob('**/(jni|jni_md).h') - incDirs=[x.parent.abspath()for x in f] - dir=conf.root.find_dir(conf.env.JAVA_HOME[0]) - f=dir.ant_glob('**/*jvm.(so|dll|dylib)') - libDirs=[x.parent.abspath()for x in f]or[javaHome] - f=dir.ant_glob('**/*jvm.(lib)') - if f: - libDirs=[[x,y.parent.abspath()]for x in libDirs for y in f] - for d in libDirs: - try: - conf.check(header_name='jni.h',define_name='HAVE_JNI_H',lib='jvm',libpath=d,includes=incDirs,uselib_store='JAVA',uselib='JAVA') - except Exception: - pass - else: - break - else: - 
conf.fatal('could not find lib jvm in %r (see config.log)'%libDirs) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py deleted file mode 100644 index 3e90377a..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/kde4.py +++ /dev/null @@ -1,48 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re -from waflib import Task,Utils -from waflib.TaskGen import feature -@feature('msgfmt') -def apply_msgfmt(self): - for lang in self.to_list(self.langs): - node=self.path.find_resource(lang+'.po') - task=self.create_task('msgfmt',node,node.change_ext('.mo')) - langname=lang.split('/') - langname=langname[-1] - inst=getattr(self,'install_path','${KDE4_LOCALE_INSTALL_DIR}') - self.bld.install_as(inst+os.sep+langname+os.sep+'LC_MESSAGES'+os.sep+getattr(self,'appname','set_your_appname')+'.mo',task.outputs[0],chmod=getattr(self,'chmod',Utils.O644)) -class msgfmt(Task.Task): - color='BLUE' - run_str='${MSGFMT} ${SRC} -o ${TGT}' -def configure(self): - kdeconfig=self.find_program('kde4-config') - prefix=self.cmd_and_log(kdeconfig+['--prefix']).strip() - fname='%s/share/apps/cmake/modules/KDELibsDependencies.cmake'%prefix - try:os.stat(fname) - except OSError: - fname='%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake'%prefix - try:os.stat(fname) - except OSError:self.fatal('could not open %s'%fname) - try: - txt=Utils.readf(fname) - except EnvironmentError: - self.fatal('could not read %s'%fname) - txt=txt.replace('\\\n','\n') - fu=re.compile('#(.*)\n') - txt=fu.sub('',txt) - setregexp=re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)') - found=setregexp.findall(txt) - for(_,key,val)in found: - self.env[key]=val - self.env['LIB_KDECORE']=['kdecore'] - self.env['LIB_KDEUI']=['kdeui'] - self.env['LIB_KIO']=['kio'] - self.env['LIB_KHTML']=['khtml'] - 
self.env['LIB_KPARTS']=['kparts'] - self.env['LIBPATH_KDECORE']=[os.path.join(self.env.KDE4_LIB_INSTALL_DIR,'kde4','devel'),self.env.KDE4_LIB_INSTALL_DIR] - self.env['INCLUDES_KDECORE']=[self.env['KDE4_INCLUDE_INSTALL_DIR']] - self.env.append_value('INCLUDES_KDECORE',[self.env['KDE4_INCLUDE_INSTALL_DIR']+os.sep+'KDE']) - self.find_program('msgfmt',var='MSGFMT') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py deleted file mode 100644 index 75162e4d..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ldc2.py +++ /dev/null @@ -1,36 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ar,d -from waflib.Configure import conf -@conf -def find_ldc2(conf): - conf.find_program(['ldc2'],var='D') - out=conf.cmd_and_log(conf.env.D+['-version']) - if out.find("based on DMD v2.")==-1: - conf.fatal("detected compiler is not ldc2") -@conf -def common_flags_ldc2(conf): - v=conf.env - v['D_SRC_F']=['-c'] - v['D_TGT_F']='-of%s' - v['D_LINKER']=v['D'] - v['DLNK_SRC_F']='' - v['DLNK_TGT_F']='-of%s' - v['DINC_ST']='-I%s' - v['DSHLIB_MARKER']=v['DSTLIB_MARKER']='' - v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s' - v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s' - v['LINKFLAGS_dshlib']=['-L-shared'] - v['DHEADER_ext']='.di' - v['DFLAGS_d_with_header']=['-H','-Hf'] - v['D_HDR_F']='%s' - v['LINKFLAGS']=[] - v['DFLAGS_dshlib']=['-relocation-model=pic'] -def configure(conf): - conf.find_ldc2() - conf.load('ar') - conf.load('d') - conf.common_flags_ldc2() - conf.d_platform_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py deleted file mode 100644 index b801d5fa..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/lua.py +++ /dev/null @@ -1,18 +0,0 @@ -#! 
/usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.TaskGen import extension -from waflib import Task -@extension('.lua') -def add_lua(self,node): - tsk=self.create_task('luac',node,node.change_ext('.luac')) - inst_to=getattr(self,'install_path',self.env.LUADIR and'${LUADIR}'or None) - if inst_to: - self.bld.install_files(inst_to,tsk.outputs) - return tsk -class luac(Task.Task): - run_str='${LUAC} -s -o ${TGT} ${SRC}' - color='PINK' -def configure(conf): - conf.find_program('luac',var='LUAC') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py deleted file mode 100644 index 5a194706..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/msvc.py +++ /dev/null @@ -1,809 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,sys,re,tempfile -from waflib import Utils,Task,Logs,Options,Errors -from waflib.Logs import debug,warn -from waflib.TaskGen import after_method,feature -from waflib.Configure import conf -from waflib.Tools import ccroot,c,cxx,ar,winres -g_msvc_systemlibs=''' -aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet -cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs -credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d -ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp -faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid -gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop -kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi -mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree -msdasc msimg32 msrating 
mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm -netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp -odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32 -osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu -ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm -rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32 -shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32 -traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg -version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm -wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp -'''.split() -all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm'),('amd64_x86','x86'),('amd64_arm','arm')] -all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')] -all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')] -def options(opt): - opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='') - opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='') - opt.add_option('--msvc_lazy_autodetect',action='store_true',help='lazily check msvc target environments') -def setup_msvc(conf,versions,arch=False): - platforms=getattr(Options.options,'msvc_targets','').split(',') - if platforms==['']: - platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms] - desired_versions=getattr(Options.options,'msvc_version','').split(',') - if desired_versions==['']: - desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1] - 
versiondict=dict(versions) - for version in desired_versions: - try: - targets=dict(versiondict[version]) - for target in platforms: - try: - try: - realtarget,(p1,p2,p3)=targets[target] - except conf.errors.ConfigurationError: - del(targets[target]) - else: - compiler,revision=version.rsplit(' ',1) - if arch: - return compiler,revision,p1,p2,p3,realtarget - else: - return compiler,revision,p1,p2,p3 - except KeyError:continue - except KeyError:continue - conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)') -@conf -def get_msvc_version(conf,compiler,version,target,vcvars): - debug('msvc: get_msvc_version: %r %r %r',compiler,version,target) - try: - conf.msvc_cnt+=1 - except AttributeError: - conf.msvc_cnt=1 - batfile=conf.bldnode.make_node('waf-print-msvc-%d.bat'%conf.msvc_cnt) - batfile.write("""@echo off -set INCLUDE= -set LIB= -call "%s" %s -echo PATH=%%PATH%% -echo INCLUDE=%%INCLUDE%% -echo LIB=%%LIB%%;%%LIBPATH%% -"""%(vcvars,target)) - sout=conf.cmd_and_log(['cmd.exe','/E:on','/V:on','/C',batfile.abspath()]) - lines=sout.splitlines() - if not lines[0]: - lines.pop(0) - MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None - for line in lines: - if line.startswith('PATH='): - path=line[5:] - MSVC_PATH=path.split(';') - elif line.startswith('INCLUDE='): - MSVC_INCDIR=[i for i in line[8:].split(';')if i] - elif line.startswith('LIB='): - MSVC_LIBDIR=[i for i in line[4:].split(';')if i] - if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR): - conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)') - env=dict(os.environ) - env.update(PATH=path) - compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - cxx=conf.find_program(compiler_name,path_list=MSVC_PATH) - if'CL'in env: - del(env['CL']) - try: - try: - conf.cmd_and_log(cxx+['/help'],env=env) - except UnicodeError: - st=Utils.ex_stack() - if conf.logger: - conf.logger.error(st) - conf.fatal('msvc: Unicode error - check the code page?') - except 
Exception as e: - debug('msvc: get_msvc_version: %r %r %r -> failure %s'%(compiler,version,target,str(e))) - conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)') - else: - debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target) - finally: - conf.env[compiler_name]='' - return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR) -@conf -def gather_wsdk_versions(conf,versions): - version_pattern=re.compile('^v..?.?\...?.?') - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows') - except WindowsError: - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows') - except WindowsError: - return - index=0 - while 1: - try: - version=Utils.winreg.EnumKey(all_versions,index) - except WindowsError: - break - index=index+1 - if not version_pattern.match(version): - continue - try: - msvc_version=Utils.winreg.OpenKey(all_versions,version) - path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder') - except WindowsError: - continue - if path and os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')): - targets=[] - for target,arch in all_msvc_platforms: - try: - targets.append((target,(arch,get_compiler_env(conf,'wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd'))))) - except conf.errors.ConfigurationError: - pass - versions.append(('wsdk '+version[1:],targets)) -def gather_wince_supported_platforms(): - supported_wince_platforms=[] - try: - ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs') - except WindowsError: - try: - ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs') - except WindowsError: - ce_sdk='' - if not ce_sdk: - return supported_wince_platforms - ce_index=0 - while 1: - try: - sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index) - except 
WindowsError: - break - ce_index=ce_index+1 - sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device) - try: - path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir') - except WindowsError: - try: - path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation') - path,xml=os.path.split(path) - except WindowsError: - continue - path=str(path) - path,device=os.path.split(path) - if not device: - path,device=os.path.split(path) - platforms=[] - for arch,compiler in all_wince_platforms: - if os.path.isdir(os.path.join(path,device,'Lib',arch)): - platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch))) - if platforms: - supported_wince_platforms.append((device,platforms)) - return supported_wince_platforms -def gather_msvc_detected_versions(): - version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$') - detected_versions=[] - for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')): - try: - prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) - except WindowsError: - try: - prefix='SOFTWARE\\Microsoft\\'+vcver - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix) - except WindowsError: - continue - index=0 - while 1: - try: - version=Utils.winreg.EnumKey(all_versions,index) - except WindowsError: - break - index=index+1 - match=version_pattern.match(version) - if not match: - continue - else: - versionnumber=float(match.group(1)) - detected_versions.append((versionnumber,version+vcvar,prefix+"\\"+version)) - def fun(tup): - return tup[0] - detected_versions.sort(key=fun) - return detected_versions -def get_compiler_env(conf,compiler,version,bat_target,bat,select=None): - lazy=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] - def msvc_thunk(): - vs=conf.get_msvc_version(compiler,version,bat_target,bat) - if select: - return select(vs) - else: - return vs - return lazytup(msvc_thunk,lazy,([],[],[])) 
-class lazytup(object): - def __init__(self,fn,lazy=True,default=None): - self.fn=fn - self.default=default - if not lazy: - self.evaluate() - def __len__(self): - self.evaluate() - return len(self.value) - def __iter__(self): - self.evaluate() - for i,v in enumerate(self.value): - yield v - def __getitem__(self,i): - self.evaluate() - return self.value[i] - def __repr__(self): - if hasattr(self,'value'): - return repr(self.value) - elif self.default: - return repr(self.default) - else: - self.evaluate() - return repr(self.value) - def evaluate(self): - if hasattr(self,'value'): - return - self.value=self.fn() -@conf -def gather_msvc_targets(conf,versions,version,vc_path): - targets=[] - if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')): - for target,realtarget in all_msvc_platforms[::-1]: - try: - targets.append((target,(realtarget,get_compiler_env(conf,'msvc',version,target,os.path.join(vc_path,'vcvarsall.bat'))))) - except conf.errors.ConfigurationError: - pass - elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')): - try: - targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat'))))) - except conf.errors.ConfigurationError: - pass - elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')): - try: - targets.append(('x86',('x86',get_compiler_env(conf,'msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat'))))) - except conf.errors.ConfigurationError: - pass - if targets: - versions.append(('msvc '+version,targets)) -@conf -def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms): - for device,platforms in supported_platforms: - cetargets=[] - for platform,compiler,include,lib in platforms: - winCEpath=os.path.join(vc_path,'ce') - if not os.path.isdir(winCEpath): - continue - if os.path.isdir(os.path.join(winCEpath,'lib',platform)): - bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)] - 
incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include] - libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib] - def combine_common(compiler_env): - (common_bindirs,_1,_2)=compiler_env - return(bindirs+common_bindirs,incdirs,libdirs) - try: - cetargets.append((platform,(platform,get_compiler_env(conf,'msvc',version,'x86',vsvars,combine_common)))) - except conf.errors.ConfigurationError: - continue - if cetargets: - versions.append((device+' '+version,cetargets)) -@conf -def gather_winphone_targets(conf,versions,version,vc_path,vsvars): - targets=[] - for target,realtarget in all_msvc_platforms[::-1]: - try: - targets.append((target,(realtarget,get_compiler_env(conf,'winphone',version,target,vsvars)))) - except conf.errors.ConfigurationError: - pass - if targets: - versions.append(('winphone '+version,targets)) -@conf -def gather_msvc_versions(conf,versions): - vc_paths=[] - for(v,version,reg)in gather_msvc_detected_versions(): - try: - try: - msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC") - except WindowsError: - msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++") - path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir') - vc_paths.append((version,os.path.abspath(str(path)))) - except WindowsError: - continue - wince_supported_platforms=gather_wince_supported_platforms() - for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat') - if wince_supported_platforms and os.path.isfile(vsvars): - conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms) - for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat') - if os.path.isfile(vsvars): - conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars) - break - 
for version,vc_path in vc_paths: - vs_path=os.path.dirname(vc_path) - conf.gather_msvc_targets(versions,version,vc_path) -@conf -def gather_icl_versions(conf,versions): - version_pattern=re.compile('^...?.?\....?.?') - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++') - except WindowsError: - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++') - except WindowsError: - return - index=0 - while 1: - try: - version=Utils.winreg.EnumKey(all_versions,index) - except WindowsError: - break - index=index+1 - if not version_pattern.match(version): - continue - targets=[] - for target,arch in all_icl_platforms: - try: - if target=='intel64':targetDir='EM64T_NATIVE' - else:targetDir=target - Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir) - icl_version=Utils.winreg.OpenKey(all_versions,version) - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') - batch_file=os.path.join(path,'bin','iclvars.bat') - if os.path.isfile(batch_file): - try: - targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) - except conf.errors.ConfigurationError: - pass - except WindowsError: - pass - for target,arch in all_icl_platforms: - try: - icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target) - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') - batch_file=os.path.join(path,'bin','iclvars.bat') - if os.path.isfile(batch_file): - try: - targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) - except conf.errors.ConfigurationError: - pass - except WindowsError: - continue - major=version[0:2] - versions.append(('intel '+major,targets)) -@conf -def gather_intel_composer_versions(conf,versions): - version_pattern=re.compile('^...?.?\...?.?.?') - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites') - except 
WindowsError: - try: - all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites') - except WindowsError: - return - index=0 - while 1: - try: - version=Utils.winreg.EnumKey(all_versions,index) - except WindowsError: - break - index=index+1 - if not version_pattern.match(version): - continue - targets=[] - for target,arch in all_icl_platforms: - try: - if target=='intel64':targetDir='EM64T_NATIVE' - else:targetDir=target - try: - defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir) - except WindowsError: - if targetDir=='EM64T_NATIVE': - defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T') - else: - raise WindowsError - uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey') - Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir) - icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++') - path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir') - batch_file=os.path.join(path,'bin','iclvars.bat') - if os.path.isfile(batch_file): - try: - targets.append((target,(arch,get_compiler_env(conf,'intel',version,target,batch_file)))) - except conf.errors.ConfigurationError: - pass - compilervars_warning_attr='_compilervars_warning_key' - if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True): - setattr(conf,compilervars_warning_attr,False) - patch_url='http://software.intel.com/en-us/forums/topic/328487' - compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat') - for vscomntool in('VS110COMNTOOLS','VS100COMNTOOLS'): - if vscomntool in os.environ: - vs_express_path=os.environ[vscomntool]+r'..\IDE\VSWinExpress.exe' - dev_env_path=os.environ[vscomntool]+r'..\IDE\devenv.exe' - if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)): - Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU 
''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url)) - except WindowsError: - pass - major=version[0:2] - versions.append(('intel '+major,targets)) -@conf -def get_msvc_versions(conf,eval_and_save=True): - if conf.env['MSVC_INSTALLED_VERSIONS']: - return conf.env['MSVC_INSTALLED_VERSIONS'] - lst=[] - conf.gather_icl_versions(lst) - conf.gather_intel_composer_versions(lst) - conf.gather_wsdk_versions(lst) - conf.gather_msvc_versions(lst) - if eval_and_save: - def checked_target(t): - target,(arch,paths)=t - try: - paths.evaluate() - except conf.errors.ConfigurationError: - return None - else: - return t - lst=[(version,list(filter(checked_target,targets)))for version,targets in lst] - conf.env['MSVC_INSTALLED_VERSIONS']=lst - return lst -@conf -def print_all_msvc_detected(conf): - for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']: - Logs.info(version) - for target,l in targets: - Logs.info("\t"+target) -@conf -def detect_msvc(conf,arch=False): - lazy_detect=getattr(Options.options,'msvc_lazy_autodetect',False)or conf.env['MSVC_LAZY_AUTODETECT'] - versions=get_msvc_versions(conf,not lazy_detect) - return setup_msvc(conf,versions,arch) -@conf -def find_lt_names_msvc(self,libname,is_static=False): - lt_names=['lib%s.la'%libname,'%s.la'%libname,] - for path in self.env['LIBPATH']: - for la in lt_names: - laf=os.path.join(path,la) - dll=None - if os.path.exists(laf): - ltdict=Utils.read_la_file(laf) - lt_libdir=None - if ltdict.get('libdir',''): - lt_libdir=ltdict['libdir'] - if not is_static and ltdict.get('library_names',''): - dllnames=ltdict['library_names'].split() - dll=dllnames[0].lower() - dll=re.sub('\.dll$','',dll) - return(lt_libdir,dll,False) - elif ltdict.get('old_library',''): - olib=ltdict['old_library'] - if os.path.exists(os.path.join(path,olib)): - return(path,olib,True) - elif 
lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)): - return(lt_libdir,olib,True) - else: - return(None,olib,True) - else: - raise self.errors.WafError('invalid libtool object file: %s'%laf) - return(None,None,None) -@conf -def libname_msvc(self,libname,is_static=False): - lib=libname.lower() - lib=re.sub('\.lib$','',lib) - if lib in g_msvc_systemlibs: - return lib - lib=re.sub('^lib','',lib) - if lib=='m': - return None - (lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static) - if lt_path!=None and lt_libname!=None: - if lt_static==True: - return os.path.join(lt_path,lt_libname) - if lt_path!=None: - _libpaths=[lt_path]+self.env['LIBPATH'] - else: - _libpaths=self.env['LIBPATH'] - static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,] - dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,] - libnames=static_libs - if not is_static: - libnames=dynamic_libs+static_libs - for path in _libpaths: - for libn in libnames: - if os.path.exists(os.path.join(path,libn)): - debug('msvc: lib found: %s'%os.path.join(path,libn)) - return re.sub('\.lib$','',libn) - self.fatal("The library %r could not be found"%libname) - return re.sub('\.lib$','',libname) -@conf -def check_lib_msvc(self,libname,is_static=False,uselib_store=None): - libn=self.libname_msvc(libname,is_static) - if not uselib_store: - uselib_store=libname.upper() - if False and is_static: - self.env['STLIB_'+uselib_store]=[libn] - else: - self.env['LIB_'+uselib_store]=[libn] -@conf -def check_libs_msvc(self,libnames,is_static=False): - for libname in Utils.to_list(libnames): - self.check_lib_msvc(libname,is_static) -def configure(conf): - conf.autodetect(True) - conf.find_msvc() - conf.msvc_common_flags() - conf.cc_load_tools() - conf.cxx_load_tools() - conf.cc_add_flags() - conf.cxx_add_flags() - conf.link_add_flags() - conf.visual_studio_add_flags() -@conf -def no_autodetect(conf): - 
conf.env.NO_MSVC_DETECT=1 - configure(conf) -@conf -def autodetect(conf,arch=False): - v=conf.env - if v.NO_MSVC_DETECT: - return - if arch: - compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True) - v['DEST_CPU']=arch - else: - compiler,version,path,includes,libdirs=conf.detect_msvc() - v['PATH']=path - v['INCLUDES']=includes - v['LIBPATH']=libdirs - v['MSVC_COMPILER']=compiler - try: - v['MSVC_VERSION']=float(version) - except Exception: - v['MSVC_VERSION']=float(version[:-3]) -def _get_prog_names(conf,compiler): - if compiler=='intel': - compiler_name='ICL' - linker_name='XILINK' - lib_name='XILIB' - else: - compiler_name='CL' - linker_name='LINK' - lib_name='LIB' - return compiler_name,linker_name,lib_name -@conf -def find_msvc(conf): - if sys.platform=='cygwin': - conf.fatal('MSVC module does not work under cygwin Python!') - v=conf.env - path=v['PATH'] - compiler=v['MSVC_COMPILER'] - version=v['MSVC_VERSION'] - compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler) - v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11) - cxx=conf.find_program(compiler_name,var='CXX',path_list=path) - env=dict(conf.environ) - if path:env.update(PATH=';'.join(path)) - if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env): - conf.fatal('the msvc compiler could not be identified') - v['CC']=v['CXX']=cxx - v['CC_NAME']=v['CXX_NAME']='msvc' - if not v['LINK_CXX']: - link=conf.find_program(linker_name,path_list=path) - if link:v['LINK_CXX']=link - else:conf.fatal('%s was not found (linker)'%linker_name) - v['LINK']=link - if not v['LINK_CC']: - v['LINK_CC']=v['LINK_CXX'] - if not v['AR']: - stliblink=conf.find_program(lib_name,path_list=path,var='AR') - if not stliblink:return - v['ARFLAGS']=['/NOLOGO'] - if v.MSVC_MANIFEST: - conf.find_program('MT',path_list=path,var='MT') - v['MTFLAGS']=['/NOLOGO'] - try: - conf.load('winres') - except Errors.WafError: - warn('Resource compiler not found. 
Compiling resource file is disabled') -@conf -def visual_studio_add_flags(self): - v=self.env - try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x]) - except Exception:pass - try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x]) - except Exception:pass -@conf -def msvc_common_flags(conf): - v=conf.env - v['DEST_BINFMT']='pe' - v.append_value('CFLAGS',['/nologo']) - v.append_value('CXXFLAGS',['/nologo']) - v['DEFINES_ST']='/D%s' - v['CC_SRC_F']='' - v['CC_TGT_F']=['/c','/Fo'] - v['CXX_SRC_F']='' - v['CXX_TGT_F']=['/c','/Fo'] - if(v.MSVC_COMPILER=='msvc'and v.MSVC_VERSION>=8)or(v.MSVC_COMPILER=='wsdk'and v.MSVC_VERSION>=6): - v['CC_TGT_F']=['/FC']+v['CC_TGT_F'] - v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F'] - v['CPPPATH_ST']='/I%s' - v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:' - v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE'] - v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE'] - v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX'] - v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS'] - v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE'] - v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT'] - v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD'] - v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd'] - v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd'] - v['LIB_ST']='%s.lib' - v['LIBPATH_ST']='/LIBPATH:%s' - v['STLIB_ST']='%s.lib' - v['STLIBPATH_ST']='/LIBPATH:%s' - v.append_value('LINKFLAGS',['/NOLOGO']) - if v['MSVC_MANIFEST']: - v.append_value('LINKFLAGS',['/MANIFEST']) - v['CFLAGS_cshlib']=[] - v['CXXFLAGS_cxxshlib']=[] - v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL'] - v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll' - v['implib_PATTERN']='%s.lib' - v['IMPLIB_ST']='/IMPLIB:%s' - v['LINKFLAGS_cstlib']=[] - 
v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib' - v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe' -@after_method('apply_link') -@feature('c','cxx') -def apply_flags_msvc(self): - if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None): - return - is_static=isinstance(self.link_task,ccroot.stlink_task) - subsystem=getattr(self,'subsystem','') - if subsystem: - subsystem='/subsystem:%s'%subsystem - flags=is_static and'ARFLAGS'or'LINKFLAGS' - self.env.append_value(flags,subsystem) - if not is_static: - for f in self.env.LINKFLAGS: - d=f.lower() - if d[1:]=='debug': - pdbnode=self.link_task.outputs[0].change_ext('.pdb') - self.link_task.outputs.append(pdbnode) - if getattr(self,'install_task',None): - self.pdb_install_task=self.bld.install_files(self.install_task.dest,pdbnode,env=self.env) - break -@feature('cprogram','cshlib','cxxprogram','cxxshlib') -@after_method('apply_link') -def apply_manifest(self): - if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None): - out_node=self.link_task.outputs[0] - man_node=out_node.parent.find_or_declare(out_node.name+'.manifest') - self.link_task.outputs.append(man_node) - self.link_task.do_manifest=True -def exec_mf(self): - env=self.env - mtool=env['MT'] - if not mtool: - return 0 - self.do_manifest=False - outfile=self.outputs[0].abspath() - manifest=None - for out_node in self.outputs: - if out_node.name.endswith('.manifest'): - manifest=out_node.abspath() - break - if manifest is None: - return 0 - mode='' - if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features: - mode='1' - elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features: - mode='2' - debug('msvc: embedding manifest in mode %r'%mode) - lst=[]+mtool - lst.extend(Utils.to_list(env['MTFLAGS'])) - lst.extend(['-manifest',manifest]) - lst.append('-outputresource:%s;%s'%(outfile,mode)) - return self.exec_command(lst) -def quote_response_command(self,flag): - if flag.find(' 
')>-1: - for x in('/LIBPATH:','/IMPLIB:','/OUT:','/I'): - if flag.startswith(x): - flag='%s"%s"'%(x,flag[len(x):]) - break - else: - flag='"%s"'%flag - return flag -def exec_response_command(self,cmd,**kw): - try: - tmp=None - if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192: - program=cmd[0] - cmd=[self.quote_response_command(x)for x in cmd] - (fd,tmp)=tempfile.mkstemp() - os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]).encode()) - os.close(fd) - cmd=[program,'@'+tmp] - ret=self.generator.bld.exec_command(cmd,**kw) - finally: - if tmp: - try: - os.remove(tmp) - except OSError: - pass - return ret -def exec_command_msvc(self,*k,**kw): - if isinstance(k[0],list): - lst=[] - carry='' - for a in k[0]: - if a=='/Fo'or a=='/doc'or a[-1]==':': - carry=a - else: - lst.append(carry+a) - carry='' - k=[lst] - if self.env['PATH']: - env=dict(self.env.env or os.environ) - env.update(PATH=';'.join(self.env['PATH'])) - kw['env']=env - bld=self.generator.bld - try: - if not kw.get('cwd',None): - kw['cwd']=bld.cwd - except AttributeError: - bld.cwd=kw['cwd']=bld.variant_dir - ret=self.exec_response_command(k[0],**kw) - if not ret and getattr(self,'do_manifest',None): - ret=self.exec_mf() - return ret -def wrap_class(class_name): - cls=Task.classes.get(class_name,None) - if not cls: - return None - derived_class=type(class_name,(cls,),{}) - def exec_command(self,*k,**kw): - if self.env['CC_NAME']=='msvc': - return self.exec_command_msvc(*k,**kw) - else: - return super(derived_class,self).exec_command(*k,**kw) - derived_class.exec_command=exec_command - derived_class.exec_response_command=exec_response_command - derived_class.quote_response_command=quote_response_command - derived_class.exec_command_msvc=exec_command_msvc - derived_class.exec_mf=exec_mf - if hasattr(cls,'hcode'): - derived_class.hcode=cls.hcode - return derived_class -for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split(): - wrap_class(k) -def 
make_winapp(self,family): - append=self.env.append_unique - append('DEFINES','WINAPI_FAMILY=%s'%family) - append('CXXFLAGS','/ZW') - append('CXXFLAGS','/TP') - for lib_path in self.env.LIBPATH: - append('CXXFLAGS','/AI%s'%lib_path) -@feature('winphoneapp') -@after_method('process_use') -@after_method('propagate_uselib_vars') -def make_winphone_app(self): - make_winapp(self,'WINAPI_FAMILY_PHONE_APP') - conf.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib') - conf.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib') -@feature('winapp') -@after_method('process_use') -@after_method('propagate_uselib_vars') -def make_windows_app(self): - make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py deleted file mode 100644 index a107298d..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/nasm.py +++ /dev/null @@ -1,16 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -import waflib.Tools.asm -from waflib.TaskGen import feature -@feature('asm') -def apply_nasm_vars(self): - self.env.append_value('ASFLAGS',self.to_list(getattr(self,'nasm_flags',[]))) -def configure(conf): - conf.find_program(['nasm','yasm'],var='AS') - conf.env.AS_TGT_F=['-o'] - conf.env.ASLNK_TGT_F=['-o'] - conf.load('asm') - conf.env.ASMPATH_ST='-I%s'+os.sep diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py deleted file mode 100644 index d065f3f9..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/perl.py +++ /dev/null @@ -1,90 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -from waflib import Task,Options,Utils -from waflib.Configure import conf -from waflib.TaskGen import extension,feature,before_method -@before_method('apply_incpaths','apply_link','propagate_uselib_vars') -@feature('perlext') -def init_perlext(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PERLEXT'in self.uselib:self.uselib.append('PERLEXT') - self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['perlext_PATTERN'] -@extension('.xs') -def xsubpp_file(self,node): - outnode=node.change_ext('.c') - self.create_task('xsubpp',node,outnode) - self.source.append(outnode) -class xsubpp(Task.Task): - run_str='${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}' - color='BLUE' - ext_out=['.h'] -@conf -def check_perl_version(self,minver=None): - res=True - if minver: - cver='.'.join(map(str,minver)) - else: - cver='' - self.start_msg('Checking for minimum perl version %s'%cver) - perl=getattr(Options.options,'perlbinary',None) - if not perl: - perl=self.find_program('perl',var='PERL') - if not perl: - self.end_msg("Perl not found",color="YELLOW") - return False - self.env['PERL']=perl - version=self.cmd_and_log(self.env.PERL+["-e",'printf \"%vd\", $^V']) - if not version: - res=False - version="Unknown" - elif not minver is None: - ver=tuple(map(int,version.split("."))) - if ver -#ifdef __cplusplus -extern "C" { -#endif - void Py_Initialize(void); - void Py_Finalize(void); -#ifdef __cplusplus -} -#endif -int main(int argc, char **argv) -{ - (void)argc; (void)argv; - Py_Initialize(); - Py_Finalize(); - return 0; -} -''' -INST=''' -import sys, py_compile -py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True) -''' -DISTUTILS_IMP=['from distutils.sysconfig import get_config_var, get_python_lib'] -@before_method('process_source') -@feature('py') -def feature_py(self): - self.install_path=getattr(self,'install_path','${PYTHONDIR}') - 
install_from=getattr(self,'install_from',None) - if install_from and not isinstance(install_from,Node.Node): - install_from=self.path.find_dir(install_from) - self.install_from=install_from - ver=self.env.PYTHON_VERSION - if not ver: - self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version') - if int(ver.replace('.',''))>31: - self.install_32=True -@extension('.py') -def process_py(self,node): - assert(getattr(self,'install_path')),'add features="py"' - if self.install_path: - if self.install_from: - self.bld.install_files(self.install_path,[node],cwd=self.install_from,relative_trick=True) - else: - self.bld.install_files(self.install_path,[node],relative_trick=True) - lst=[] - if self.env.PYC: - lst.append('pyc') - if self.env.PYO: - lst.append('pyo') - if self.install_path: - if self.install_from: - pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.install_from)),self.env) - else: - pyd=Utils.subst_vars("%s/%s"%(self.install_path,node.path_from(self.path)),self.env) - else: - pyd=node.abspath() - for ext in lst: - if self.env.PYTAG: - name=node.name[:-3] - pyobj=node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s"%(name,self.env.PYTAG,ext)) - pyobj.parent.mkdir() - else: - pyobj=node.change_ext(".%s"%ext) - tsk=self.create_task(ext,node,pyobj) - tsk.pyd=pyd - if self.install_path: - self.bld.install_files(os.path.dirname(pyd),pyobj,cwd=node.parent.get_bld(),relative_trick=True) -class pyc(Task.Task): - color='PINK' - def run(self): - cmd=[Utils.subst_vars('${PYTHON}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] - ret=self.generator.bld.exec_command(cmd) - return ret -class pyo(Task.Task): - color='PINK' - def run(self): - cmd=[Utils.subst_vars('${PYTHON}',self.env),Utils.subst_vars('${PYFLAGS_OPT}',self.env),'-c',INST,self.inputs[0].abspath(),self.outputs[0].abspath(),self.pyd] - ret=self.generator.bld.exec_command(cmd) - return ret -@feature('pyext') 
-@before_method('propagate_uselib_vars','apply_link') -@after_method('apply_bundle') -def init_pyext(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PYEXT'in self.uselib: - self.uselib.append('PYEXT') - self.env.cshlib_PATTERN=self.env.cxxshlib_PATTERN=self.env.macbundle_PATTERN=self.env.pyext_PATTERN - self.env.fcshlib_PATTERN=self.env.dshlib_PATTERN=self.env.pyext_PATTERN - try: - if not self.install_path: - return - except AttributeError: - self.install_path='${PYTHONARCHDIR}' -@feature('pyext') -@before_method('apply_link','apply_bundle') -def set_bundle(self): - if Utils.unversioned_sys_platform()=='darwin': - self.mac_bundle=True -@before_method('propagate_uselib_vars') -@feature('pyembed') -def init_pyembed(self): - self.uselib=self.to_list(getattr(self,'uselib',[])) - if not'PYEMBED'in self.uselib: - self.uselib.append('PYEMBED') -@conf -def get_python_variables(self,variables,imports=None): - if not imports: - try: - imports=self.python_imports - except AttributeError: - imports=DISTUTILS_IMP - program=list(imports) - program.append('') - for v in variables: - program.append("print(repr(%s))"%v) - os_env=dict(os.environ) - try: - del os_env['MACOSX_DEPLOYMENT_TARGET'] - except KeyError: - pass - try: - out=self.cmd_and_log(self.env.PYTHON+['-c','\n'.join(program)],env=os_env) - except Errors.WafError: - self.fatal('The distutils module is unusable: install "python-devel"?') - self.to_log(out) - return_values=[] - for s in out.splitlines(): - s=s.strip() - if not s: - continue - if s=='None': - return_values.append(None) - elif(s[0]=="'"and s[-1]=="'")or(s[0]=='"'and s[-1]=='"'): - return_values.append(eval(s)) - elif s[0].isdigit(): - return_values.append(int(s)) - else:break - return return_values -@conf -def test_pyembed(self,mode,msg='Testing pyembed configuration'): - self.check(header_name='Python.h',define_name='HAVE_PYEMBED',msg=msg,fragment=FRAG,errmsg='Could not build a python embedded interpreter',features='%s %sprogram 
pyembed'%(mode,mode)) -@conf -def test_pyext(self,mode,msg='Testing pyext configuration'): - self.check(header_name='Python.h',define_name='HAVE_PYEXT',msg=msg,fragment=FRAG,errmsg='Could not build python extensions',features='%s %sshlib pyext'%(mode,mode)) -@conf -def python_cross_compile(self,features='pyembed pyext'): - features=Utils.to_list(features) - if not('PYTHON_LDFLAGS'in self.environ or'PYTHON_PYEXT_LDFLAGS'in self.environ or'PYTHON_PYEMBED_LDFLAGS'in self.environ): - return False - for x in'PYTHON_VERSION PYTAG pyext_PATTERN'.split(): - if not x in self.environ: - self.fatal('Please set %s in the os environment'%x) - else: - self.env[x]=self.environ[x] - xx=self.env.CXX_NAME and'cxx'or'c' - if'pyext'in features: - flags=self.environ.get('PYTHON_PYEXT_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) - if flags is None: - self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required') - else: - self.parse_flags(flags,'PYEXT') - self.test_pyext(xx) - if'pyembed'in features: - flags=self.environ.get('PYTHON_PYEMBED_LDFLAGS',self.environ.get('PYTHON_LDFLAGS',None)) - if flags is None: - self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required') - else: - self.parse_flags(flags,'PYEMBED') - self.test_pyembed(xx) - return True -@conf -def check_python_headers(conf,features='pyembed pyext'): - features=Utils.to_list(features) - assert('pyembed'in features)or('pyext'in features),"check_python_headers features must include 'pyembed' and/or 'pyext'" - env=conf.env - if not env['CC_NAME']and not env['CXX_NAME']: - conf.fatal('load a compiler first (gcc, g++, ..)') - if conf.python_cross_compile(features): - return - if not env['PYTHON_VERSION']: - conf.check_python_version() - pybin=env.PYTHON - if not pybin: - conf.fatal('Could not find the python executable') - v='prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split() - try: - 
lst=conf.get_python_variables(["get_config_var('%s') or ''"%x for x in v]) - except RuntimeError: - conf.fatal("Python development headers not found (-v for details).") - vals=['%s = %r'%(x,y)for(x,y)in zip(v,lst)] - conf.to_log("Configuration returned from %r:\n%s\n"%(pybin,'\n'.join(vals))) - dct=dict(zip(v,lst)) - x='MACOSX_DEPLOYMENT_TARGET' - if dct[x]: - env[x]=conf.environ[x]=dct[x] - env['pyext_PATTERN']='%s'+dct['SO'] - num='.'.join(env['PYTHON_VERSION'].split('.')[:2]) - conf.find_program([''.join(pybin)+'-config','python%s-config'%num,'python-config-%s'%num,'python%sm-config'%num],var='PYTHON_CONFIG',msg="python-config",mandatory=False) - if env.PYTHON_CONFIG: - all_flags=[['--cflags','--libs','--ldflags']] - if sys.hexversion<0x2070000: - all_flags=[[k]for k in all_flags[0]] - xx=env.CXX_NAME and'cxx'or'c' - if'pyembed'in features: - for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyembed %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEMBED',args=flags) - try: - conf.test_pyembed(xx) - except conf.errors.ConfigurationError: - if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: - env.append_unique('LIBPATH_PYEMBED',[dct['LIBDIR']]) - conf.test_pyembed(xx) - else: - raise - if'pyext'in features: - for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyext %r flags'%' '.join(flags),path=env.PYTHON_CONFIG,package='',uselib_store='PYEXT',args=flags) - try: - conf.test_pyext(xx) - except conf.errors.ConfigurationError: - if dct['Py_ENABLE_SHARED']and dct['LIBDIR']: - env.append_unique('LIBPATH_PYEXT',[dct['LIBDIR']]) - conf.test_pyext(xx) - else: - raise - conf.define('HAVE_PYTHON_H',1) - return - all_flags=dct['LDFLAGS']+' '+dct['CFLAGS'] - conf.parse_flags(all_flags,'PYEMBED') - all_flags=dct['LDFLAGS']+' '+dct['LDSHARED']+' '+dct['CFLAGS'] - conf.parse_flags(all_flags,'PYEXT') - result=None - if not dct["LDVERSION"]: - dct["LDVERSION"]=env['PYTHON_VERSION'] - for name 
in('python'+dct['LDVERSION'],'python'+env['PYTHON_VERSION']+'m','python'+env['PYTHON_VERSION'].replace('.','')): - if not result and env['LIBPATH_PYEMBED']: - path=env['LIBPATH_PYEMBED'] - conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n"%path) - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBPATH_PYEMBED'%name) - if not result and dct['LIBDIR']: - path=[dct['LIBDIR']] - conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n"%path) - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in LIBDIR'%name) - if not result and dct['LIBPL']: - path=[dct['LIBPL']] - conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n") - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in python_LIBPL'%name) - if not result: - path=[os.path.join(dct['prefix'],"libs")] - conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n") - result=conf.check(lib=name,uselib='PYEMBED',libpath=path,mandatory=False,msg='Checking for library %s in $prefix/libs'%name) - if result: - break - if result: - env['LIBPATH_PYEMBED']=path - env.append_value('LIB_PYEMBED',[name]) - else: - conf.to_log("\n\n### LIB NOT FOUND\n") - if Utils.is_win32 or dct['Py_ENABLE_SHARED']: - env['LIBPATH_PYEXT']=env['LIBPATH_PYEMBED'] - env['LIB_PYEXT']=env['LIB_PYEMBED'] - conf.to_log("Include path for Python extensions (found via distutils module): %r\n"%(dct['INCLUDEPY'],)) - env['INCLUDES_PYEXT']=[dct['INCLUDEPY']] - env['INCLUDES_PYEMBED']=[dct['INCLUDEPY']] - if env['CC_NAME']=='gcc': - env.append_value('CFLAGS_PYEMBED',['-fno-strict-aliasing']) - env.append_value('CFLAGS_PYEXT',['-fno-strict-aliasing']) - if env['CXX_NAME']=='gcc': - env.append_value('CXXFLAGS_PYEMBED',['-fno-strict-aliasing']) - 
env.append_value('CXXFLAGS_PYEXT',['-fno-strict-aliasing']) - if env.CC_NAME=="msvc": - from distutils.msvccompiler import MSVCCompiler - dist_compiler=MSVCCompiler() - dist_compiler.initialize() - env.append_value('CFLAGS_PYEXT',dist_compiler.compile_options) - env.append_value('CXXFLAGS_PYEXT',dist_compiler.compile_options) - env.append_value('LINKFLAGS_PYEXT',dist_compiler.ldflags_shared) - conf.check(header_name='Python.h',define_name='HAVE_PYTHON_H',uselib='PYEMBED',fragment=FRAG,errmsg='Distutils not installed? Broken python installation? Get python-config now!') -@conf -def check_python_version(conf,minver=None): - assert minver is None or isinstance(minver,tuple) - pybin=conf.env['PYTHON'] - if not pybin: - conf.fatal('could not find the python executable') - cmd=pybin+['-c','import sys\nfor x in sys.version_info: print(str(x))'] - Logs.debug('python: Running python command %r'%cmd) - lines=conf.cmd_and_log(cmd).split() - assert len(lines)==5,"found %i lines, expected 5: %r"%(len(lines),lines) - pyver_tuple=(int(lines[0]),int(lines[1]),int(lines[2]),lines[3],int(lines[4])) - result=(minver is None)or(pyver_tuple>=minver) - if result: - pyver='.'.join([str(x)for x in pyver_tuple[:2]]) - conf.env['PYTHON_VERSION']=pyver - if'PYTHONDIR'in conf.env: - pydir=conf.env['PYTHONDIR'] - elif'PYTHONDIR'in conf.environ: - pydir=conf.environ['PYTHONDIR'] - else: - if Utils.is_win32: - (python_LIBDEST,pydir)=conf.get_python_variables(["get_config_var('LIBDEST') or ''","get_python_lib(standard_lib=0) or ''"]) - else: - python_LIBDEST=None - (pydir,)=conf.get_python_variables(["get_python_lib(standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) - if python_LIBDEST is None: - if conf.env['LIBDIR']: - python_LIBDEST=os.path.join(conf.env['LIBDIR'],"python"+pyver) - else: - python_LIBDEST=os.path.join(conf.env['PREFIX'],"lib","python"+pyver) - if'PYTHONARCHDIR'in conf.env: - pyarchdir=conf.env['PYTHONARCHDIR'] - elif'PYTHONARCHDIR'in conf.environ: - 
pyarchdir=conf.environ['PYTHONARCHDIR'] - else: - (pyarchdir,)=conf.get_python_variables(["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''"%conf.env.PREFIX]) - if not pyarchdir: - pyarchdir=pydir - if hasattr(conf,'define'): - conf.define('PYTHONDIR',pydir) - conf.define('PYTHONARCHDIR',pyarchdir) - conf.env['PYTHONDIR']=pydir - conf.env['PYTHONARCHDIR']=pyarchdir - pyver_full='.'.join(map(str,pyver_tuple[:3])) - if minver is None: - conf.msg('Checking for python version',pyver_full) - else: - minver_str='.'.join(map(str,minver)) - conf.msg('Checking for python version',pyver_tuple,">= %s"%(minver_str,)and'GREEN'or'YELLOW') - if not result: - conf.fatal('The python version is too old, expecting %r'%(minver,)) -PYTHON_MODULE_TEMPLATE=''' -import %s as current_module -version = getattr(current_module, '__version__', None) -if version is not None: - print(str(version)) -else: - print('unknown version') -''' -@conf -def check_python_module(conf,module_name,condition=''): - msg="Checking for python module '%s'"%module_name - if condition: - msg='%s (%s)'%(msg,condition) - conf.start_msg(msg) - try: - ret=conf.cmd_and_log(conf.env['PYTHON']+['-c',PYTHON_MODULE_TEMPLATE%module_name]) - except Exception: - conf.end_msg(False) - conf.fatal('Could not find the python module %r'%module_name) - ret=ret.strip() - if condition: - conf.end_msg(ret) - if ret=='unknown version': - conf.fatal('Could not check the %s version'%module_name) - from distutils.version import LooseVersion - def num(*k): - if isinstance(k[0],int): - return LooseVersion('.'.join([str(x)for x in k])) - else: - return LooseVersion(k[0]) - d={'num':num,'ver':LooseVersion(ret)} - ev=eval(condition,{},d) - if not ev: - conf.fatal('The %s version does not satisfy the requirements'%module_name) - else: - if ret=='unknown version': - conf.end_msg(True) - else: - conf.end_msg(ret) -def configure(conf): - v=conf.env - v['PYTHON']=Options.options.python or os.environ.get('PYTHON',sys.executable) - if 
Options.options.pythondir: - v['PYTHONDIR']=Options.options.pythondir - if Options.options.pythonarchdir: - v['PYTHONARCHDIR']=Options.options.pythonarchdir - conf.find_program('python',var='PYTHON') - v['PYFLAGS']='' - v['PYFLAGS_OPT']='-O' - v['PYC']=getattr(Options.options,'pyc',1) - v['PYO']=getattr(Options.options,'pyo',1) - try: - v.PYTAG=conf.cmd_and_log(conf.env.PYTHON+['-c',"import imp;print(imp.get_tag())"]).strip() - except Errors.WafError: - pass -def options(opt): - pyopt=opt.add_option_group("Python Options") - pyopt.add_option('--nopyc',dest='pyc',action='store_false',default=1,help='Do not install bytecode compiled .pyc files (configuration) [Default:install]') - pyopt.add_option('--nopyo',dest='pyo',action='store_false',default=1,help='Do not install optimised compiled .pyo files (configuration) [Default:install]') - pyopt.add_option('--python',dest="python",help='python binary to be used [Default: %s]'%sys.executable) - pyopt.add_option('--pythondir',dest='pythondir',help='Installation path for python modules (py, platform-independent .py and .pyc files)') - pyopt.add_option('--pythonarchdir',dest='pythonarchdir',help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py deleted file mode 100644 index 896c5b43..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt4.py +++ /dev/null @@ -1,442 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -try: - from xml.sax import make_parser - from xml.sax.handler import ContentHandler -except ImportError: - has_xml=False - ContentHandler=object -else: - has_xml=True -import os,sys -from waflib.Tools import cxx -from waflib import Task,Utils,Options,Errors,Context -from waflib.TaskGen import feature,after_method,extension -from waflib.Configure import conf -from waflib import Logs -MOC_H=['.h','.hpp','.hxx','.hh'] -EXT_RCC=['.qrc'] -EXT_UI=['.ui'] -EXT_QT4=['.cpp','.cc','.cxx','.C'] -QT4_LIBS="QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner" -class qxx(Task.classes['cxx']): - def __init__(self,*k,**kw): - Task.Task.__init__(self,*k,**kw) - self.moc_done=0 - def runnable_status(self): - if self.moc_done: - return Task.Task.runnable_status(self) - else: - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - self.add_moc_tasks() - return Task.Task.runnable_status(self) - def create_moc_task(self,h_node,m_node): - try: - moc_cache=self.generator.bld.moc_cache - except AttributeError: - moc_cache=self.generator.bld.moc_cache={} - try: - return moc_cache[h_node] - except KeyError: - tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) - tsk.set_inputs(h_node) - tsk.set_outputs(m_node) - if self.generator: - self.generator.tasks.append(tsk) - gen=self.generator.bld.producer - gen.outstanding.insert(0,tsk) - gen.total+=1 - return tsk - def moc_h_ext(self): - ext=[] - try: - ext=Options.options.qt_header_ext.split() - except AttributeError: - pass - if not ext: - ext=MOC_H - return ext - def add_moc_tasks(self): - node=self.inputs[0] - bld=self.generator.bld - try: - self.signature() - except KeyError: - pass - else: - delattr(self,'cache_sig') - include_nodes=[node.parent]+self.generator.includes_nodes - moctasks=[] - mocfiles=set([]) - for d in bld.raw_deps.get(self.uid(),[]): 
- if not d.endswith('.moc'): - continue - if d in mocfiles: - continue - mocfiles.add(d) - h_node=None - base2=d[:-4] - for x in include_nodes: - for e in self.moc_h_ext(): - h_node=x.find_node(base2+e) - if h_node: - break - if h_node: - m_node=h_node.change_ext('.moc') - break - else: - for k in EXT_QT4: - if base2.endswith(k): - for x in include_nodes: - h_node=x.find_node(base2) - if h_node: - break - if h_node: - m_node=h_node.change_ext(k+'.moc') - break - if not h_node: - raise Errors.WafError('No source found for %r which is a moc file'%d) - task=self.create_moc_task(h_node,m_node) - moctasks.append(task) - self.run_after.update(set(moctasks)) - self.moc_done=1 -class trans_update(Task.Task): - run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' - color='BLUE' -Task.update_outputs(trans_update) -class XMLHandler(ContentHandler): - def __init__(self): - self.buf=[] - self.files=[] - def startElement(self,name,attrs): - if name=='file': - self.buf=[] - def endElement(self,name): - if name=='file': - self.files.append(str(''.join(self.buf))) - def characters(self,cars): - self.buf.append(cars) -@extension(*EXT_RCC) -def create_rcc_task(self,node): - rcnode=node.change_ext('_rc.cpp') - self.create_task('rcc',node,rcnode) - cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) - try: - self.compiled_tasks.append(cpptask) - except AttributeError: - self.compiled_tasks=[cpptask] - return cpptask -@extension(*EXT_UI) -def create_uic_task(self,node): - uictask=self.create_task('ui4',node) - uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] -@extension('.ts') -def add_lang(self,node): - self.lang=self.to_list(getattr(self,'lang',[]))+[node] -@feature('qt4') -@after_method('apply_link') -def apply_qt4(self): - if getattr(self,'lang',None): - qmtasks=[] - for x in self.to_list(self.lang): - if isinstance(x,str): - x=self.path.find_resource(x+'.ts') - qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) - if 
getattr(self,'update',None)and Options.options.trans_qt4: - cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] - for x in qmtasks: - self.create_task('trans_update',cxxnodes,x.inputs) - if getattr(self,'langname',None): - qmnodes=[x.outputs[0]for x in qmtasks] - rcnode=self.langname - if isinstance(rcnode,str): - rcnode=self.path.find_or_declare(rcnode+'.qrc') - t=self.create_task('qm2rcc',qmnodes,rcnode) - k=create_rcc_task(self,t.outputs[0]) - self.link_task.inputs.append(k.outputs[0]) - lst=[] - for flag in self.to_list(self.env['CXXFLAGS']): - if len(flag)<2:continue - f=flag[0:2] - if f in('-D','-I','/D','/I'): - if(f[0]=='/'): - lst.append('-'+flag[1:]) - else: - lst.append(flag) - self.env.append_value('MOC_FLAGS',lst) -@extension(*EXT_QT4) -def cxx_hook(self,node): - return self.create_compiled_task('qxx',node) -class rcc(Task.Task): - color='BLUE' - run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' - ext_out=['.h'] - def rcname(self): - return os.path.splitext(self.inputs[0].name)[0] - def scan(self): - if not has_xml: - Logs.error('no xml support was found, the rcc dependencies will be incomplete!') - return([],[]) - parser=make_parser() - curHandler=XMLHandler() - parser.setContentHandler(curHandler) - fi=open(self.inputs[0].abspath(),'r') - try: - parser.parse(fi) - finally: - fi.close() - nodes=[] - names=[] - root=self.inputs[0].parent - for x in curHandler.files: - nd=root.find_resource(x) - if nd:nodes.append(nd) - else:names.append(x) - return(nodes,names) -class moc(Task.Task): - color='BLUE' - run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' - def keyword(self): - return"Creating" - def __str__(self): - return self.outputs[0].path_from(self.generator.bld.launch_node()) -class ui4(Task.Task): - color='BLUE' - run_str='${QT_UIC} ${SRC} -o ${TGT}' - ext_out=['.h'] 
-class ts2qm(Task.Task): - color='BLUE' - run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' -class qm2rcc(Task.Task): - color='BLUE' - after='ts2qm' - def run(self): - txt='\n'.join(['%s'%k.path_from(self.outputs[0].parent)for k in self.inputs]) - code='\n\n%s\n\n'%txt - self.outputs[0].write(code) -def configure(self): - self.find_qt4_binaries() - self.set_qt4_libs_to_check() - self.set_qt4_defines() - self.find_qt4_libraries() - self.add_qt4_rpath() - self.simplify_qt4_libs() -@conf -def find_qt4_binaries(self): - env=self.env - opt=Options.options - qtdir=getattr(opt,'qtdir','') - qtbin=getattr(opt,'qtbin','') - paths=[] - if qtdir: - qtbin=os.path.join(qtdir,'bin') - if not qtdir: - qtdir=os.environ.get('QT4_ROOT','') - qtbin=os.environ.get('QT4_BIN',None)or os.path.join(qtdir,'bin') - if qtbin: - paths=[qtbin] - if not qtdir: - paths=os.environ.get('PATH','').split(os.pathsep) - paths.append('/usr/share/qt4/bin/') - try: - lst=Utils.listdir('/usr/local/Trolltech/') - except OSError: - pass - else: - if lst: - lst.sort() - lst.reverse() - qtdir='/usr/local/Trolltech/%s/'%lst[0] - qtbin=os.path.join(qtdir,'bin') - paths.append(qtbin) - cand=None - prev_ver=['4','0','0'] - for qmk in('qmake-qt4','qmake4','qmake'): - try: - qmake=self.find_program(qmk,path_list=paths) - except self.errors.ConfigurationError: - pass - else: - try: - version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() - except self.errors.WafError: - pass - else: - if version: - new_ver=version.split('.') - if new_ver>prev_ver: - cand=qmake - prev_ver=new_ver - if cand: - self.env.QMAKE=cand - else: - self.fatal('Could not find qmake for qt4') - qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep - def find_bin(lst,var): - if var in env: - return - for f in lst: - try: - ret=self.find_program(f,path_list=paths) - except self.errors.ConfigurationError: - pass - else: - env[var]=ret - break - find_bin(['uic-qt3','uic3'],'QT_UIC3') - 
find_bin(['uic-qt4','uic'],'QT_UIC') - if not env.QT_UIC: - self.fatal('cannot find the uic compiler for qt4') - self.start_msg('Checking for uic version') - uicver=self.cmd_and_log(env.QT_UIC+["-version"],output=Context.BOTH) - uicver=''.join(uicver).strip() - uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') - self.end_msg(uicver) - if uicver.find(' 3.')!=-1: - self.fatal('this uic compiler is for qt3, add uic for qt4 to your path') - find_bin(['moc-qt4','moc'],'QT_MOC') - find_bin(['rcc-qt4','rcc'],'QT_RCC') - find_bin(['lrelease-qt4','lrelease'],'QT_LRELEASE') - find_bin(['lupdate-qt4','lupdate'],'QT_LUPDATE') - env['UIC3_ST']='%s -o %s' - env['UIC_ST']='%s -o %s' - env['MOC_ST']='-o' - env['ui_PATTERN']='ui_%s.h' - env['QT_LRELEASE_FLAGS']=['-silent'] - env.MOCCPPPATH_ST='-I%s' - env.MOCDEFINES_ST='-D%s' -@conf -def find_qt4_libraries(self): - qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT4_LIBDIR",None) - if not qtlibs: - try: - qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() - except Errors.WafError: - qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep - qtlibs=os.path.join(qtdir,'lib') - self.msg('Found the Qt4 libraries in',qtlibs) - qtincludes=os.environ.get("QT4_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() - env=self.env - if not'PKG_CONFIG_PATH'in os.environ: - os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib'%(qtlibs,qtlibs) - try: - if os.environ.get("QT4_XCOMPILE",None): - raise self.errors.ConfigurationError() - self.check_cfg(atleast_pkgconfig_version='0.1') - except self.errors.ConfigurationError: - for i in self.qt4_vars: - uselib=i.upper() - if Utils.unversioned_sys_platform()=="darwin": - frameworkName=i+".framework" - qtDynamicLib=os.path.join(qtlibs,frameworkName,i) - if 
os.path.exists(qtDynamicLib): - env.append_unique('FRAMEWORK_'+uselib,i) - self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) - elif env.DEST_OS!="win32": - qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") - qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") - if os.path.exists(qtDynamicLib): - env.append_unique('LIB_'+uselib,i) - self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') - elif os.path.exists(qtStaticLib): - env.append_unique('LIB_'+uselib,i) - self.msg('Checking for %s'%i,qtStaticLib,'GREEN') - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) - else: - for k in("lib%s.a","lib%s4.a","%s.lib","%s4.lib"): - lib=os.path.join(qtlibs,k%i) - if os.path.exists(lib): - env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) - self.msg('Checking for %s'%i,lib,'GREEN') - break - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) - uselib=i.upper()+"_debug" - for k in("lib%sd.a","lib%sd4.a","%sd.lib","%sd4.lib"): - lib=os.path.join(qtlibs,k%i) - if os.path.exists(lib): - env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) - self.msg('Checking for %s'%i,lib,'GREEN') - break - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) - else: - for i in self.qt4_vars_debug+self.qt4_vars: - self.check_cfg(package=i,args='--cflags --libs',mandatory=False) -@conf -def simplify_qt4_libs(self): - env=self.env - def 
process_lib(vars_,coreval): - for d in vars_: - var=d.upper() - if var=='QTCORE': - continue - value=env['LIBPATH_'+var] - if value: - core=env[coreval] - accu=[] - for lib in value: - if lib in core: - continue - accu.append(lib) - env['LIBPATH_'+var]=accu - process_lib(self.qt4_vars,'LIBPATH_QTCORE') - process_lib(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') -@conf -def add_qt4_rpath(self): - env=self.env - if getattr(Options.options,'want_rpath',False): - def process_rpath(vars_,coreval): - for d in vars_: - var=d.upper() - value=env['LIBPATH_'+var] - if value: - core=env[coreval] - accu=[] - for lib in value: - if var!='QTCORE': - if lib in core: - continue - accu.append('-Wl,--rpath='+lib) - env['RPATH_'+var]=accu - process_rpath(self.qt4_vars,'LIBPATH_QTCORE') - process_rpath(self.qt4_vars_debug,'LIBPATH_QTCORE_DEBUG') -@conf -def set_qt4_libs_to_check(self): - if not hasattr(self,'qt4_vars'): - self.qt4_vars=QT4_LIBS - self.qt4_vars=Utils.to_list(self.qt4_vars) - if not hasattr(self,'qt4_vars_debug'): - self.qt4_vars_debug=[a+'_debug'for a in self.qt4_vars] - self.qt4_vars_debug=Utils.to_list(self.qt4_vars_debug) -@conf -def set_qt4_defines(self): - if sys.platform!='win32': - return - for x in self.qt4_vars: - y=x[2:].upper() - self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) - self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) -def options(opt): - opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') - opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') - for i in'qtdir qtbin qtlibs'.split(): - opt.add_option('--'+i,type='string',default='',dest=i) - opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt4",default=False) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py deleted file mode 100644 index f69c79dc..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/qt5.py +++ /dev/null @@ -1,489 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -try: - from xml.sax import make_parser - from xml.sax.handler import ContentHandler -except ImportError: - has_xml=False - ContentHandler=object -else: - has_xml=True -import os,sys -from waflib.Tools import cxx -from waflib import Task,Utils,Options,Errors,Context -from waflib.TaskGen import feature,after_method,extension -from waflib.Configure import conf -from waflib import Logs -MOC_H=['.h','.hpp','.hxx','.hh'] -EXT_RCC=['.qrc'] -EXT_UI=['.ui'] -EXT_QT5=['.cpp','.cc','.cxx','.C'] -QT5_LIBS=''' -qtmain -Qt5Bluetooth -Qt5CLucene -Qt5Concurrent -Qt5Core -Qt5DBus -Qt5Declarative -Qt5DesignerComponents -Qt5Designer -Qt5Gui -Qt5Help -Qt5MultimediaQuick_p -Qt5Multimedia -Qt5MultimediaWidgets -Qt5Network -Qt5Nfc -Qt5OpenGL -Qt5Positioning -Qt5PrintSupport -Qt5Qml -Qt5QuickParticles -Qt5Quick -Qt5QuickTest -Qt5Script -Qt5ScriptTools -Qt5Sensors -Qt5SerialPort -Qt5Sql -Qt5Svg -Qt5Test -Qt5WebKit -Qt5WebKitWidgets -Qt5Widgets -Qt5WinExtras -Qt5X11Extras -Qt5XmlPatterns -Qt5Xml''' -class qxx(Task.classes['cxx']): - def __init__(self,*k,**kw): - Task.Task.__init__(self,*k,**kw) - self.moc_done=0 - def runnable_status(self): - if self.moc_done: - return Task.Task.runnable_status(self) - else: - for t in self.run_after: - if not t.hasrun: - return Task.ASK_LATER - self.add_moc_tasks() - return Task.Task.runnable_status(self) - def create_moc_task(self,h_node,m_node): - try: - moc_cache=self.generator.bld.moc_cache - except AttributeError: - moc_cache=self.generator.bld.moc_cache={} - try: - return moc_cache[h_node] - except KeyError: - tsk=moc_cache[h_node]=Task.classes['moc'](env=self.env,generator=self.generator) - 
tsk.set_inputs(h_node) - tsk.set_outputs(m_node) - if self.generator: - self.generator.tasks.append(tsk) - gen=self.generator.bld.producer - gen.outstanding.insert(0,tsk) - gen.total+=1 - return tsk - else: - delattr(self,'cache_sig') - def moc_h_ext(self): - ext=[] - try: - ext=Options.options.qt_header_ext.split() - except AttributeError: - pass - if not ext: - ext=MOC_H - return ext - def add_moc_tasks(self): - node=self.inputs[0] - bld=self.generator.bld - try: - self.signature() - except KeyError: - pass - else: - delattr(self,'cache_sig') - include_nodes=[node.parent]+self.generator.includes_nodes - moctasks=[] - mocfiles=set([]) - for d in bld.raw_deps.get(self.uid(),[]): - if not d.endswith('.moc'): - continue - if d in mocfiles: - continue - mocfiles.add(d) - h_node=None - base2=d[:-4] - for x in include_nodes: - for e in self.moc_h_ext(): - h_node=x.find_node(base2+e) - if h_node: - break - if h_node: - m_node=h_node.change_ext('.moc') - break - else: - for k in EXT_QT5: - if base2.endswith(k): - for x in include_nodes: - h_node=x.find_node(base2) - if h_node: - break - if h_node: - m_node=h_node.change_ext(k+'.moc') - break - if not h_node: - raise Errors.WafError('No source found for %r which is a moc file'%d) - task=self.create_moc_task(h_node,m_node) - moctasks.append(task) - self.run_after.update(set(moctasks)) - self.moc_done=1 -class trans_update(Task.Task): - run_str='${QT_LUPDATE} ${SRC} -ts ${TGT}' - color='BLUE' -Task.update_outputs(trans_update) -class XMLHandler(ContentHandler): - def __init__(self): - self.buf=[] - self.files=[] - def startElement(self,name,attrs): - if name=='file': - self.buf=[] - def endElement(self,name): - if name=='file': - self.files.append(str(''.join(self.buf))) - def characters(self,cars): - self.buf.append(cars) -@extension(*EXT_RCC) -def create_rcc_task(self,node): - rcnode=node.change_ext('_rc.cpp') - self.create_task('rcc',node,rcnode) - cpptask=self.create_task('cxx',rcnode,rcnode.change_ext('.o')) - try: - 
self.compiled_tasks.append(cpptask) - except AttributeError: - self.compiled_tasks=[cpptask] - return cpptask -@extension(*EXT_UI) -def create_uic_task(self,node): - uictask=self.create_task('ui5',node) - uictask.outputs=[self.path.find_or_declare(self.env['ui_PATTERN']%node.name[:-3])] -@extension('.ts') -def add_lang(self,node): - self.lang=self.to_list(getattr(self,'lang',[]))+[node] -@feature('qt5') -@after_method('apply_link') -def apply_qt5(self): - if getattr(self,'lang',None): - qmtasks=[] - for x in self.to_list(self.lang): - if isinstance(x,str): - x=self.path.find_resource(x+'.ts') - qmtasks.append(self.create_task('ts2qm',x,x.change_ext('.qm'))) - if getattr(self,'update',None)and Options.options.trans_qt5: - cxxnodes=[a.inputs[0]for a in self.compiled_tasks]+[a.inputs[0]for a in self.tasks if getattr(a,'inputs',None)and a.inputs[0].name.endswith('.ui')] - for x in qmtasks: - self.create_task('trans_update',cxxnodes,x.inputs) - if getattr(self,'langname',None): - qmnodes=[x.outputs[0]for x in qmtasks] - rcnode=self.langname - if isinstance(rcnode,str): - rcnode=self.path.find_or_declare(rcnode+'.qrc') - t=self.create_task('qm2rcc',qmnodes,rcnode) - k=create_rcc_task(self,t.outputs[0]) - self.link_task.inputs.append(k.outputs[0]) - lst=[] - for flag in self.to_list(self.env['CXXFLAGS']): - if len(flag)<2:continue - f=flag[0:2] - if f in('-D','-I','/D','/I'): - if(f[0]=='/'): - lst.append('-'+flag[1:]) - else: - lst.append(flag) - self.env.append_value('MOC_FLAGS',lst) -@extension(*EXT_QT5) -def cxx_hook(self,node): - return self.create_compiled_task('qxx',node) -class rcc(Task.Task): - color='BLUE' - run_str='${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}' - ext_out=['.h'] - def rcname(self): - return os.path.splitext(self.inputs[0].name)[0] - def scan(self): - if not has_xml: - Logs.error('no xml support was found, the rcc dependencies will be incomplete!') - return([],[]) - parser=make_parser() - curHandler=XMLHandler() - 
parser.setContentHandler(curHandler) - fi=open(self.inputs[0].abspath(),'r') - try: - parser.parse(fi) - finally: - fi.close() - nodes=[] - names=[] - root=self.inputs[0].parent - for x in curHandler.files: - nd=root.find_resource(x) - if nd:nodes.append(nd) - else:names.append(x) - return(nodes,names) -class moc(Task.Task): - color='BLUE' - run_str='${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}' -class ui5(Task.Task): - color='BLUE' - run_str='${QT_UIC} ${SRC} -o ${TGT}' - ext_out=['.h'] -class ts2qm(Task.Task): - color='BLUE' - run_str='${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}' -class qm2rcc(Task.Task): - color='BLUE' - after='ts2qm' - def run(self): - txt='\n'.join(['%s'%k.path_from(self.outputs[0].parent)for k in self.inputs]) - code='\n\n%s\n\n'%txt - self.outputs[0].write(code) -def configure(self): - self.find_qt5_binaries() - self.set_qt5_libs_to_check() - self.set_qt5_defines() - self.find_qt5_libraries() - self.add_qt5_rpath() - self.simplify_qt5_libs() -@conf -def find_qt5_binaries(self): - env=self.env - opt=Options.options - qtdir=getattr(opt,'qtdir','') - qtbin=getattr(opt,'qtbin','') - paths=[] - if qtdir: - qtbin=os.path.join(qtdir,'bin') - if not qtdir: - qtdir=os.environ.get('QT5_ROOT','') - qtbin=os.environ.get('QT5_BIN',None)or os.path.join(qtdir,'bin') - if qtbin: - paths=[qtbin] - if not qtdir: - paths=os.environ.get('PATH','').split(os.pathsep) - paths.append('/usr/share/qt5/bin/') - try: - lst=Utils.listdir('/usr/local/Trolltech/') - except OSError: - pass - else: - if lst: - lst.sort() - lst.reverse() - qtdir='/usr/local/Trolltech/%s/'%lst[0] - qtbin=os.path.join(qtdir,'bin') - paths.append(qtbin) - cand=None - prev_ver=['5','0','0'] - for qmk in('qmake-qt5','qmake5','qmake'): - try: - qmake=self.find_program(qmk,path_list=paths) - except self.errors.ConfigurationError: - pass - else: - try: - version=self.cmd_and_log(qmake+['-query','QT_VERSION']).strip() - except 
self.errors.WafError: - pass - else: - if version: - new_ver=version.split('.') - if new_ver>prev_ver: - cand=qmake - prev_ver=new_ver - if not cand: - try: - self.find_program('qtchooser') - except self.errors.ConfigurationError: - pass - else: - cmd=self.env.QTCHOOSER+['-qt=5','-run-tool=qmake'] - try: - version=self.cmd_and_log(cmd+['-query','QT_VERSION']) - except self.errors.WafError: - pass - else: - cand=cmd - if cand: - self.env.QMAKE=cand - else: - self.fatal('Could not find qmake for qt5') - self.env.QT_INSTALL_BINS=qtbin=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_BINS']).strip()+os.sep - paths.insert(0,qtbin) - def find_bin(lst,var): - if var in env: - return - for f in lst: - try: - ret=self.find_program(f,path_list=paths) - except self.errors.ConfigurationError: - pass - else: - env[var]=ret - break - find_bin(['uic-qt5','uic'],'QT_UIC') - if not env.QT_UIC: - self.fatal('cannot find the uic compiler for qt5') - self.start_msg('Checking for uic version') - uicver=self.cmd_and_log(env.QT_UIC+['-version'],output=Context.BOTH) - uicver=''.join(uicver).strip() - uicver=uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt','') - self.end_msg(uicver) - if uicver.find(' 3.')!=-1 or uicver.find(' 4.')!=-1: - self.fatal('this uic compiler is for qt3 or qt5, add uic for qt5 to your path') - find_bin(['moc-qt5','moc'],'QT_MOC') - find_bin(['rcc-qt5','rcc'],'QT_RCC') - find_bin(['lrelease-qt5','lrelease'],'QT_LRELEASE') - find_bin(['lupdate-qt5','lupdate'],'QT_LUPDATE') - env['UIC_ST']='%s -o %s' - env['MOC_ST']='-o' - env['ui_PATTERN']='ui_%s.h' - env['QT_LRELEASE_FLAGS']=['-silent'] - env.MOCCPPPATH_ST='-I%s' - env.MOCDEFINES_ST='-D%s' -@conf -def find_qt5_libraries(self): - qtlibs=getattr(Options.options,'qtlibs',None)or os.environ.get("QT5_LIBDIR",None) - if not qtlibs: - try: - qtlibs=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_LIBS']).strip() - except Errors.WafError: - 
qtdir=self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_PREFIX']).strip()+os.sep - qtlibs=os.path.join(qtdir,'lib') - self.msg('Found the Qt5 libraries in',qtlibs) - qtincludes=os.environ.get("QT5_INCLUDES",None)or self.cmd_and_log(self.env.QMAKE+['-query','QT_INSTALL_HEADERS']).strip() - env=self.env - if not'PKG_CONFIG_PATH'in os.environ: - os.environ['PKG_CONFIG_PATH']='%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib'%(qtlibs,qtlibs) - try: - if os.environ.get("QT5_XCOMPILE",None): - raise self.errors.ConfigurationError() - self.check_cfg(atleast_pkgconfig_version='0.1') - except self.errors.ConfigurationError: - for i in self.qt5_vars: - uselib=i.upper() - if Utils.unversioned_sys_platform()=="darwin": - frameworkName=i+".framework" - qtDynamicLib=os.path.join(qtlibs,frameworkName,i) - if os.path.exists(qtDynamicLib): - env.append_unique('FRAMEWORK_'+uselib,i) - self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('INCLUDES_'+uselib,os.path.join(qtlibs,frameworkName,'Headers')) - elif env.DEST_OS!="win32": - qtDynamicLib=os.path.join(qtlibs,"lib"+i+".so") - qtStaticLib=os.path.join(qtlibs,"lib"+i+".a") - if os.path.exists(qtDynamicLib): - env.append_unique('LIB_'+uselib,i) - self.msg('Checking for %s'%i,qtDynamicLib,'GREEN') - elif os.path.exists(qtStaticLib): - env.append_unique('LIB_'+uselib,i) - self.msg('Checking for %s'%i,qtStaticLib,'GREEN') - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i)) - else: - for k in("lib%s.a","lib%s5.a","%s.lib","%s5.lib"): - lib=os.path.join(qtlibs,k%i) - if os.path.exists(lib): - env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) - self.msg('Checking for %s'%i,lib,'GREEN') - break - else: - self.msg('Checking for 
%s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) - uselib=i.upper()+"_debug" - for k in("lib%sd.a","lib%sd5.a","%sd.lib","%sd5.lib"): - lib=os.path.join(qtlibs,k%i) - if os.path.exists(lib): - env.append_unique('LIB_'+uselib,i+k[k.find("%s")+2:k.find('.')]) - self.msg('Checking for %s'%i,lib,'GREEN') - break - else: - self.msg('Checking for %s'%i,False,'YELLOW') - env.append_unique('LIBPATH_'+uselib,qtlibs) - env.append_unique('INCLUDES_'+uselib,qtincludes) - env.append_unique('INCLUDES_'+uselib,os.path.join(qtincludes,i.replace('Qt5','Qt'))) - else: - for i in self.qt5_vars_debug+self.qt5_vars: - self.check_cfg(package=i,args='--cflags --libs',mandatory=False) -@conf -def simplify_qt5_libs(self): - env=self.env - def process_lib(vars_,coreval): - for d in vars_: - var=d.upper() - if var=='QTCORE': - continue - value=env['LIBPATH_'+var] - if value: - core=env[coreval] - accu=[] - for lib in value: - if lib in core: - continue - accu.append(lib) - env['LIBPATH_'+var]=accu - process_lib(self.qt5_vars,'LIBPATH_QTCORE') - process_lib(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') -@conf -def add_qt5_rpath(self): - env=self.env - if getattr(Options.options,'want_rpath',False): - def process_rpath(vars_,coreval): - for d in vars_: - var=d.upper() - value=env['LIBPATH_'+var] - if value: - core=env[coreval] - accu=[] - for lib in value: - if var!='QTCORE': - if lib in core: - continue - accu.append('-Wl,--rpath='+lib) - env['RPATH_'+var]=accu - process_rpath(self.qt5_vars,'LIBPATH_QTCORE') - process_rpath(self.qt5_vars_debug,'LIBPATH_QTCORE_DEBUG') -@conf -def set_qt5_libs_to_check(self): - if not hasattr(self,'qt5_vars'): - self.qt5_vars=QT5_LIBS - self.qt5_vars=Utils.to_list(self.qt5_vars) - if not hasattr(self,'qt5_vars_debug'): - self.qt5_vars_debug=[a+'_debug'for a in self.qt5_vars] - 
self.qt5_vars_debug=Utils.to_list(self.qt5_vars_debug) -@conf -def set_qt5_defines(self): - if sys.platform!='win32': - return - for x in self.qt5_vars: - y=x.replace('Qt5','Qt')[2:].upper() - self.env.append_unique('DEFINES_%s'%x.upper(),'QT_%s_LIB'%y) - self.env.append_unique('DEFINES_%s_DEBUG'%x.upper(),'QT_%s_LIB'%y) -def options(opt): - opt.add_option('--want-rpath',action='store_true',default=False,dest='want_rpath',help='enable the rpath for qt libraries') - opt.add_option('--header-ext',type='string',default='',help='header extension for moc files',dest='qt_header_ext') - for i in'qtdir qtbin qtlibs'.split(): - opt.add_option('--'+i,type='string',default='',dest=i) - opt.add_option('--translate',action="store_true",help="collect translation strings",dest="trans_qt5",default=False) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py deleted file mode 100644 index 39c170ae..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/ruby.py +++ /dev/null @@ -1,103 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -from waflib import Options,Utils,Task -from waflib.TaskGen import before_method,feature,extension -from waflib.Configure import conf -@feature('rubyext') -@before_method('apply_incpaths','apply_lib_vars','apply_bundle','apply_link') -def init_rubyext(self): - self.install_path='${ARCHDIR_RUBY}' - self.uselib=self.to_list(getattr(self,'uselib','')) - if not'RUBY'in self.uselib: - self.uselib.append('RUBY') - if not'RUBYEXT'in self.uselib: - self.uselib.append('RUBYEXT') -@feature('rubyext') -@before_method('apply_link','propagate_uselib') -def apply_ruby_so_name(self): - self.env['cshlib_PATTERN']=self.env['cxxshlib_PATTERN']=self.env['rubyext_PATTERN'] -@conf -def check_ruby_version(self,minver=()): - if Options.options.rubybinary: - self.env.RUBY=Options.options.rubybinary - else: - self.find_program('ruby',var='RUBY') - ruby=self.env.RUBY - try: - version=self.cmd_and_log(ruby+['-e','puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip() - except Exception: - self.fatal('could not determine ruby version') - self.env.RUBY_VERSION=version - try: - ver=tuple(map(int,version.split("."))) - except Exception: - self.fatal('unsupported ruby version %r'%version) - cver='' - if minver: - if ver=(1,9,0): - ruby_hdrdir=read_config('rubyhdrdir') - cpppath+=ruby_hdrdir - if version>=(2,0,0): - cpppath+=read_config('rubyarchhdrdir') - cpppath+=[os.path.join(ruby_hdrdir[0],read_config('arch')[0])] - self.check(header_name='ruby.h',includes=cpppath,errmsg='could not find ruby header file',link_header_test=False) - self.env.LIBPATH_RUBYEXT=read_config('libdir') - self.env.LIBPATH_RUBYEXT+=archdir - self.env.INCLUDES_RUBYEXT=cpppath - self.env.CFLAGS_RUBYEXT=read_config('CCDLFLAGS') - self.env.rubyext_PATTERN='%s.'+read_config('DLEXT')[0] - flags=read_config('LDSHARED') - while flags and flags[0][0]!='-': - flags=flags[1:] - if len(flags)>1 and flags[1]=="ppc": - flags=flags[2:] - 
self.env.LINKFLAGS_RUBYEXT=flags - self.env.LINKFLAGS_RUBYEXT+=read_config('LIBS') - self.env.LINKFLAGS_RUBYEXT+=read_config('LIBRUBYARG_SHARED') - if Options.options.rubyarchdir: - self.env.ARCHDIR_RUBY=Options.options.rubyarchdir - else: - self.env.ARCHDIR_RUBY=read_config('sitearchdir')[0] - if Options.options.rubylibdir: - self.env.LIBDIR_RUBY=Options.options.rubylibdir - else: - self.env.LIBDIR_RUBY=read_config('sitelibdir')[0] -@conf -def check_ruby_module(self,module_name): - self.start_msg('Ruby module %s'%module_name) - try: - self.cmd_and_log(self.env.RUBY+['-e','require \'%s\';puts 1'%module_name]) - except Exception: - self.end_msg(False) - self.fatal('Could not find the ruby module %r'%module_name) - self.end_msg(True) -@extension('.rb') -def process(self,node): - return self.create_task('run_ruby',node) -class run_ruby(Task.Task): - run_str='${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}' -def options(opt): - opt.add_option('--with-ruby-archdir',type='string',dest='rubyarchdir',help='Specify directory where to install arch specific files') - opt.add_option('--with-ruby-libdir',type='string',dest='rubylibdir',help='Specify alternate ruby library path') - opt.add_option('--with-ruby-binary',type='string',dest='rubybinary',help='Specify alternate ruby binary') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py deleted file mode 100644 index f014abfc..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncc.py +++ /dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_scc(conf): - v=conf.env - cc=conf.find_program('cc',var='CC') - try: - conf.cmd_and_log(cc+['-flags']) - except Exception: - conf.fatal('%r is not a Sun compiler'%cc) - v.CC_NAME='sun' - conf.get_suncc_version(cc) -@conf -def scc_common_flags(conf): - v=conf.env - v['CC_SRC_F']=[] - v['CC_TGT_F']=['-c','-o'] - if not v['LINK_CC']:v['LINK_CC']=v['CC'] - v['CCLNK_SRC_F']='' - v['CCLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['SONAME_ST']='-Wl,-h,%s' - v['SHLIB_MARKER']='-Bdynamic' - v['STLIB_MARKER']='-Bstatic' - v['cprogram_PATTERN']='%s' - v['CFLAGS_cshlib']=['-xcode=pic32','-DPIC'] - v['LINKFLAGS_cshlib']=['-G'] - v['cshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cstlib']=['-Bstatic'] - v['cstlib_PATTERN']='lib%s.a' -def configure(conf): - conf.find_scc() - conf.find_ar() - conf.scc_common_flags() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py deleted file mode 100644 index 7130fdfd..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/suncxx.py +++ /dev/null @@ -1,46 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_sxx(conf): - v=conf.env - cc=conf.find_program(['CC','c++'],var='CXX') - try: - conf.cmd_and_log(cc+['-flags']) - except Exception: - conf.fatal('%r is not a Sun compiler'%cc) - v.CXX_NAME='sun' - conf.get_suncc_version(cc) -@conf -def sxx_common_flags(conf): - v=conf.env - v['CXX_SRC_F']=[] - v['CXX_TGT_F']=['-c','-o'] - if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] - v['CXXLNK_SRC_F']=[] - v['CXXLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['SONAME_ST']='-Wl,-h,%s' - v['SHLIB_MARKER']='-Bdynamic' - v['STLIB_MARKER']='-Bstatic' - v['cxxprogram_PATTERN']='%s' - v['CXXFLAGS_cxxshlib']=['-xcode=pic32','-DPIC'] - v['LINKFLAGS_cxxshlib']=['-G'] - v['cxxshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cxxstlib']=['-Bstatic'] - v['cxxstlib_PATTERN']='lib%s.a' -def configure(conf): - conf.find_sxx() - conf.find_ar() - conf.sxx_common_flags() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py deleted file mode 100644 index a91fd911..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/tex.py +++ /dev/null @@ -1,317 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re -from waflib import Utils,Task,Errors,Logs,Node -from waflib.TaskGen import feature,before_method -re_bibunit=re.compile(r'\\(?Pputbib)\[(?P[^\[\]]*)\]',re.M) -def bibunitscan(self): - node=self.inputs[0] - nodes=[] - if not node:return nodes - code=node.read() - for match in re_bibunit.finditer(code): - path=match.group('file') - if path: - for k in('','.bib'): - Logs.debug('tex: trying %s%s'%(path,k)) - fi=node.parent.find_resource(path+k) - if fi: - nodes.append(fi) - else: - Logs.debug('tex: could not find %s'%path) - Logs.debug("tex: found the following bibunit files: %s"%nodes) - return nodes -exts_deps_tex=['','.ltx','.tex','.bib','.pdf','.png','.eps','.ps','.sty'] -exts_tex=['.ltx','.tex'] -re_tex=re.compile(r'\\(?Pusepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P[^{}]*)}',re.M) -g_bibtex_re=re.compile('bibdata',re.M) -g_glossaries_re=re.compile('\\@newglossary',re.M) -class tex(Task.Task): - bibtex_fun,_=Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}',shell=False) - bibtex_fun.__doc__=""" - Execute the program **bibtex** - """ - makeindex_fun,_=Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}',shell=False) - makeindex_fun.__doc__=""" - Execute the program **makeindex** - """ - makeglossaries_fun,_=Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}',shell=False) - makeglossaries_fun.__doc__=""" - Execute the program **makeglossaries** - """ - def exec_command(self,cmd,**kw): - bld=self.generator.bld - Logs.info('runner: %r'%cmd) - try: - if not kw.get('cwd',None): - kw['cwd']=bld.cwd - except AttributeError: - bld.cwd=kw['cwd']=bld.variant_dir - return Utils.subprocess.Popen(cmd,**kw).wait() - def scan_aux(self,node): - nodes=[node] - re_aux=re.compile(r'\\@input{(?P[^{}]*)}',re.M) - def parse_node(node): - code=node.read() - for match in re_aux.finditer(code): - 
path=match.group('file') - found=node.parent.find_or_declare(path) - if found and found not in nodes: - Logs.debug('tex: found aux node '+found.abspath()) - nodes.append(found) - parse_node(found) - parse_node(node) - return nodes - def scan(self): - node=self.inputs[0] - nodes=[] - names=[] - seen=[] - if not node:return(nodes,names) - def parse_node(node): - if node in seen: - return - seen.append(node) - code=node.read() - global re_tex - for match in re_tex.finditer(code): - multibib=match.group('type') - if multibib and multibib.startswith('bibliography'): - multibib=multibib[len('bibliography'):] - if multibib.startswith('style'): - continue - else: - multibib=None - for path in match.group('file').split(','): - if path: - add_name=True - found=None - for k in exts_deps_tex: - for up in self.texinputs_nodes: - Logs.debug('tex: trying %s%s'%(path,k)) - found=up.find_resource(path+k) - if found: - break - for tsk in self.generator.tasks: - if not found or found in tsk.outputs: - break - else: - nodes.append(found) - add_name=False - for ext in exts_tex: - if found.name.endswith(ext): - parse_node(found) - break - if found and multibib and found.name.endswith('.bib'): - try: - self.multibibs.append(found) - except AttributeError: - self.multibibs=[found] - if add_name: - names.append(path) - parse_node(node) - for x in nodes: - x.parent.get_bld().mkdir() - Logs.debug("tex: found the following : %s and names %s"%(nodes,names)) - return(nodes,names) - def check_status(self,msg,retcode): - if retcode!=0: - raise Errors.WafError("%r command exit status %r"%(msg,retcode)) - def bibfile(self): - for aux_node in self.aux_nodes: - try: - ct=aux_node.read() - except EnvironmentError: - Logs.error('Error reading %s: %r'%aux_node.abspath()) - continue - if g_bibtex_re.findall(ct): - Logs.info('calling bibtex') - self.env.env={} - self.env.env.update(os.environ) - self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) - 
self.env.SRCFILE=aux_node.name[:-4] - self.check_status('error when calling bibtex',self.bibtex_fun()) - for node in getattr(self,'multibibs',[]): - self.env.env={} - self.env.env.update(os.environ) - self.env.env.update({'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()}) - self.env.SRCFILE=node.name[:-4] - self.check_status('error when calling bibtex',self.bibtex_fun()) - def bibunits(self): - try: - bibunits=bibunitscan(self) - except OSError: - Logs.error('error bibunitscan') - else: - if bibunits: - fn=['bu'+str(i)for i in range(1,len(bibunits)+1)] - if fn: - Logs.info('calling bibtex on bibunits') - for f in fn: - self.env.env={'BIBINPUTS':self.texinputs(),'BSTINPUTS':self.texinputs()} - self.env.SRCFILE=f - self.check_status('error when calling bibtex',self.bibtex_fun()) - def makeindex(self): - self.idx_node=self.inputs[0].change_ext('.idx') - try: - idx_path=self.idx_node.abspath() - os.stat(idx_path) - except OSError: - Logs.info('index file %s absent, not calling makeindex'%idx_path) - else: - Logs.info('calling makeindex') - self.env.SRCFILE=self.idx_node.name - self.env.env={} - self.check_status('error when calling makeindex %s'%idx_path,self.makeindex_fun()) - def bibtopic(self): - p=self.inputs[0].parent.get_bld() - if os.path.exists(os.path.join(p.abspath(),'btaux.aux')): - self.aux_nodes+=p.ant_glob('*[0-9].aux') - def makeglossaries(self): - src_file=self.inputs[0].abspath() - base_file=os.path.basename(src_file) - base,_=os.path.splitext(base_file) - for aux_node in self.aux_nodes: - try: - ct=aux_node.read() - except EnvironmentError: - Logs.error('Error reading %s: %r'%aux_node.abspath()) - continue - if g_glossaries_re.findall(ct): - if not self.env.MAKEGLOSSARIES: - raise Errors.WafError("The program 'makeglossaries' is missing!") - Logs.warn('calling makeglossaries') - self.env.SRCFILE=base - self.check_status('error when calling makeglossaries %s'%base,self.makeglossaries_fun()) - return - def texinputs(self): - return 
os.pathsep.join([k.abspath()for k in self.texinputs_nodes])+os.pathsep - def run(self): - env=self.env - if not env['PROMPT_LATEX']: - env.append_value('LATEXFLAGS','-interaction=batchmode') - env.append_value('PDFLATEXFLAGS','-interaction=batchmode') - env.append_value('XELATEXFLAGS','-interaction=batchmode') - self.cwd=self.inputs[0].parent.get_bld().abspath() - Logs.info('first pass on %s'%self.__class__.__name__) - cur_hash=self.hash_aux_nodes() - self.call_latex() - self.hash_aux_nodes() - self.bibtopic() - self.bibfile() - self.bibunits() - self.makeindex() - self.makeglossaries() - for i in range(10): - prev_hash=cur_hash - cur_hash=self.hash_aux_nodes() - if not cur_hash: - Logs.error('No aux.h to process') - if cur_hash and cur_hash==prev_hash: - break - Logs.info('calling %s'%self.__class__.__name__) - self.call_latex() - def hash_aux_nodes(self): - try: - self.aux_nodes - except AttributeError: - try: - self.aux_nodes=self.scan_aux(self.inputs[0].change_ext('.aux')) - except IOError: - return None - return Utils.h_list([Utils.h_file(x.abspath())for x in self.aux_nodes]) - def call_latex(self): - self.env.env={} - self.env.env.update(os.environ) - self.env.env.update({'TEXINPUTS':self.texinputs()}) - self.env.SRCFILE=self.inputs[0].abspath() - self.check_status('error when calling latex',self.texfun()) -class latex(tex): - texfun,vars=Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}',shell=False) -class pdflatex(tex): - texfun,vars=Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}',shell=False) -class xelatex(tex): - texfun,vars=Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}',shell=False) -class dvips(Task.Task): - run_str='${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}' - color='BLUE' - after=['latex','pdflatex','xelatex'] -class dvipdf(Task.Task): - run_str='${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}' - color='BLUE' - after=['latex','pdflatex','xelatex'] -class pdf2ps(Task.Task): - run_str='${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}' - color='BLUE' 
- after=['latex','pdflatex','xelatex'] -@feature('tex') -@before_method('process_source') -def apply_tex(self): - if not getattr(self,'type',None)in('latex','pdflatex','xelatex'): - self.type='pdflatex' - outs=Utils.to_list(getattr(self,'outs',[])) - self.env['PROMPT_LATEX']=getattr(self,'prompt',1) - deps_lst=[] - if getattr(self,'deps',None): - deps=self.to_list(self.deps) - for dep in deps: - if isinstance(dep,str): - n=self.path.find_resource(dep) - if not n: - self.bld.fatal('Could not find %r for %r'%(dep,self)) - if not n in deps_lst: - deps_lst.append(n) - elif isinstance(dep,Node.Node): - deps_lst.append(dep) - for node in self.to_nodes(self.source): - if self.type=='latex': - task=self.create_task('latex',node,node.change_ext('.dvi')) - elif self.type=='pdflatex': - task=self.create_task('pdflatex',node,node.change_ext('.pdf')) - elif self.type=='xelatex': - task=self.create_task('xelatex',node,node.change_ext('.pdf')) - task.env=self.env - if deps_lst: - for n in deps_lst: - if not n in task.dep_nodes: - task.dep_nodes.append(n) - if hasattr(self,'texinputs_nodes'): - task.texinputs_nodes=self.texinputs_nodes - else: - task.texinputs_nodes=[node.parent,node.parent.get_bld(),self.path,self.path.get_bld()] - lst=os.environ.get('TEXINPUTS','') - if self.env.TEXINPUTS: - lst+=os.pathsep+self.env.TEXINPUTS - if lst: - lst=lst.split(os.pathsep) - for x in lst: - if x: - if os.path.isabs(x): - p=self.bld.root.find_node(x) - if p: - task.texinputs_nodes.append(p) - else: - Logs.error('Invalid TEXINPUTS folder %s'%x) - else: - Logs.error('Cannot resolve relative paths in TEXINPUTS %s'%x) - if self.type=='latex': - if'ps'in outs: - tsk=self.create_task('dvips',task.outputs,node.change_ext('.ps')) - tsk.env.env=dict(os.environ) - if'pdf'in outs: - tsk=self.create_task('dvipdf',task.outputs,node.change_ext('.pdf')) - tsk.env.env=dict(os.environ) - elif self.type=='pdflatex': - if'ps'in outs: - self.create_task('pdf2ps',task.outputs,node.change_ext('.ps')) - 
self.source=[] -def configure(self): - v=self.env - for p in'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split(): - try: - self.find_program(p,var=p.upper()) - except self.errors.ConfigurationError: - pass - v['DVIPSFLAGS']='-Ppdf' diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py deleted file mode 100644 index 68af6511..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/vala.py +++ /dev/null @@ -1,211 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re -from waflib import Context,Task,Utils,Logs,Options,Errors,Node -from waflib.TaskGen import extension,taskgen_method -from waflib.Configure import conf -class valac(Task.Task): - vars=["VALAC","VALAC_VERSION","VALAFLAGS"] - ext_out=['.h'] - def run(self): - cmd=self.env.VALAC+self.env.VALAFLAGS - resources=getattr(self,'vala_exclude',[]) - cmd.extend([a.abspath()for a in self.inputs if a not in resources]) - ret=self.exec_command(cmd,cwd=self.vala_dir_node.abspath()) - if ret: - return ret - if self.generator.dump_deps_node: - self.generator.dump_deps_node.write('\n'.join(self.generator.packages)) - return ret -valac=Task.update_outputs(valac) -@taskgen_method -def init_vala_task(self): - self.profile=getattr(self,'profile','gobject') - if self.profile=='gobject': - self.uselib=Utils.to_list(getattr(self,'uselib',[])) - if not'GOBJECT'in self.uselib: - self.uselib.append('GOBJECT') - def addflags(flags): - self.env.append_value('VALAFLAGS',flags) - if self.profile: - addflags('--profile=%s'%self.profile) - valatask=self.valatask - if hasattr(self,'vala_dir'): - if isinstance(self.vala_dir,str): - valatask.vala_dir_node=self.path.get_bld().make_node(self.vala_dir) - try: - valatask.vala_dir_node.mkdir() - except OSError: - raise self.bld.fatal('Cannot create the 
vala dir %r'%valatask.vala_dir_node) - else: - valatask.vala_dir_node=self.vala_dir - else: - valatask.vala_dir_node=self.path.get_bld() - addflags('--directory=%s'%valatask.vala_dir_node.abspath()) - if hasattr(self,'thread'): - if self.profile=='gobject': - if not'GTHREAD'in self.uselib: - self.uselib.append('GTHREAD') - else: - Logs.warn("Profile %s means no threading support"%self.profile) - self.thread=False - if self.thread: - addflags('--thread') - self.is_lib='cprogram'not in self.features - if self.is_lib: - addflags('--library=%s'%self.target) - h_node=valatask.vala_dir_node.find_or_declare('%s.h'%self.target) - valatask.outputs.append(h_node) - addflags('--header=%s'%h_node.name) - valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi'%self.target)) - if getattr(self,'gir',None): - gir_node=valatask.vala_dir_node.find_or_declare('%s.gir'%self.gir) - addflags('--gir=%s'%gir_node.name) - valatask.outputs.append(gir_node) - self.vala_target_glib=getattr(self,'vala_target_glib',getattr(Options.options,'vala_target_glib',None)) - if self.vala_target_glib: - addflags('--target-glib=%s'%self.vala_target_glib) - addflags(['--define=%s'%x for x in getattr(self,'vala_defines',[])]) - packages_private=Utils.to_list(getattr(self,'packages_private',[])) - addflags(['--pkg=%s'%x for x in packages_private]) - def _get_api_version(): - api_version='1.0' - if hasattr(Context.g_module,'API_VERSION'): - version=Context.g_module.API_VERSION.split(".") - if version[0]=="0": - api_version="0."+version[1] - else: - api_version=version[0]+".0" - return api_version - self.includes=Utils.to_list(getattr(self,'includes',[])) - self.uselib=self.to_list(getattr(self,'uselib',[])) - valatask.install_path=getattr(self,'install_path','') - valatask.vapi_path=getattr(self,'vapi_path','${DATAROOTDIR}/vala/vapi') - valatask.pkg_name=getattr(self,'pkg_name',self.env['PACKAGE']) - 
valatask.header_path=getattr(self,'header_path','${INCLUDEDIR}/%s-%s'%(valatask.pkg_name,_get_api_version())) - valatask.install_binding=getattr(self,'install_binding',True) - self.packages=packages=Utils.to_list(getattr(self,'packages',[])) - self.vapi_dirs=vapi_dirs=Utils.to_list(getattr(self,'vapi_dirs',[])) - if hasattr(self,'use'): - local_packages=Utils.to_list(self.use)[:] - seen=[] - while len(local_packages)>0: - package=local_packages.pop() - if package in seen: - continue - seen.append(package) - try: - package_obj=self.bld.get_tgen_by_name(package) - except Errors.WafError: - continue - package_name=package_obj.target - for task in package_obj.tasks: - for output in task.outputs: - if output.name==package_name+".vapi": - valatask.set_run_after(task) - if package_name not in packages: - packages.append(package_name) - if output.parent not in vapi_dirs: - vapi_dirs.append(output.parent) - if output.parent not in self.includes: - self.includes.append(output.parent) - if hasattr(package_obj,'use'): - lst=self.to_list(package_obj.use) - lst.reverse() - local_packages=[pkg for pkg in lst if pkg not in seen]+local_packages - addflags(['--pkg=%s'%p for p in packages]) - for vapi_dir in vapi_dirs: - if isinstance(vapi_dir,Node.Node): - v_node=vapi_dir - else: - v_node=self.path.find_dir(vapi_dir) - if not v_node: - Logs.warn('Unable to locate Vala API directory: %r'%vapi_dir) - else: - addflags('--vapidir=%s'%v_node.abspath()) - self.dump_deps_node=None - if self.is_lib and self.packages: - self.dump_deps_node=valatask.vala_dir_node.find_or_declare('%s.deps'%self.target) - valatask.outputs.append(self.dump_deps_node) - self.includes.append(self.bld.srcnode.abspath()) - self.includes.append(self.bld.bldnode.abspath()) - if self.is_lib and valatask.install_binding: - headers_list=[o for o in valatask.outputs if o.suffix()==".h"] - try: - self.install_vheader.source=headers_list - except AttributeError: - 
self.install_vheader=self.bld.install_files(valatask.header_path,headers_list,self.env) - vapi_list=[o for o in valatask.outputs if(o.suffix()in(".vapi",".deps"))] - try: - self.install_vapi.source=vapi_list - except AttributeError: - self.install_vapi=self.bld.install_files(valatask.vapi_path,vapi_list,self.env) - gir_list=[o for o in valatask.outputs if o.suffix()=='.gir'] - try: - self.install_gir.source=gir_list - except AttributeError: - self.install_gir=self.bld.install_files(getattr(self,'gir_path','${DATAROOTDIR}/gir-1.0'),gir_list,self.env) - if hasattr(self,'vala_resources'): - nodes=self.to_nodes(self.vala_resources) - valatask.vala_exclude=getattr(valatask,'vala_exclude',[])+nodes - valatask.inputs.extend(nodes) - for x in nodes: - addflags(['--gresources',x.abspath()]) -@extension('.vala','.gs') -def vala_file(self,node): - try: - valatask=self.valatask - except AttributeError: - valatask=self.valatask=self.create_task('valac') - self.init_vala_task() - valatask.inputs.append(node) - name=node.name[:node.name.rfind('.')]+'.c' - c_node=valatask.vala_dir_node.find_or_declare(name) - valatask.outputs.append(c_node) - self.source.append(c_node) -@conf -def find_valac(self,valac_name,min_version): - valac=self.find_program(valac_name,var='VALAC') - try: - output=self.cmd_and_log(valac+['--version']) - except Exception: - valac_version=None - else: - ver=re.search(r'\d+.\d+.\d+',output).group(0).split('.') - valac_version=tuple([int(x)for x in ver]) - self.msg('Checking for %s version >= %r'%(valac_name,min_version),valac_version,valac_version and valac_version>=min_version) - if valac and valac_version= %r"%(valac_name,valac_version,min_version)) - self.env['VALAC_VERSION']=valac_version - return valac -@conf -def check_vala(self,min_version=(0,8,0),branch=None): - if not branch: - branch=min_version[:2] - try: - find_valac(self,'valac-%d.%d'%(branch[0],branch[1]),min_version) - except self.errors.ConfigurationError: - find_valac(self,'valac',min_version) 
-@conf -def check_vala_deps(self): - if not self.env['HAVE_GOBJECT']: - pkg_args={'package':'gobject-2.0','uselib_store':'GOBJECT','args':'--cflags --libs'} - if getattr(Options.options,'vala_target_glib',None): - pkg_args['atleast_version']=Options.options.vala_target_glib - self.check_cfg(**pkg_args) - if not self.env['HAVE_GTHREAD']: - pkg_args={'package':'gthread-2.0','uselib_store':'GTHREAD','args':'--cflags --libs'} - if getattr(Options.options,'vala_target_glib',None): - pkg_args['atleast_version']=Options.options.vala_target_glib - self.check_cfg(**pkg_args) -def configure(self): - self.load('gnu_dirs') - self.check_vala_deps() - self.check_vala() - self.env.VALAFLAGS=['-C'] -def options(opt): - opt.load('gnu_dirs') - valaopts=opt.add_option_group('Vala Compiler Options') - valaopts.add_option('--vala-target-glib',default=None,dest='vala_target_glib',metavar='MAJOR.MINOR',help='Target version of glib for Vala GObject code generation') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py deleted file mode 100644 index d07e1ed8..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/waf_unit_test.py +++ /dev/null @@ -1,106 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -from waflib.TaskGen import feature,after_method,taskgen_method -from waflib import Utils,Task,Logs,Options -testlock=Utils.threading.Lock() -@feature('test') -@after_method('apply_link') -def make_test(self): - if getattr(self,'link_task',None): - self.create_task('utest',self.link_task.outputs) -@taskgen_method -def add_test_results(self,tup): - Logs.debug("ut: %r",tup) - self.utest_result=tup - try: - self.bld.utest_results.append(tup) - except AttributeError: - self.bld.utest_results=[tup] -class utest(Task.Task): - color='PINK' - after=['vnum','inst'] - vars=[] - def runnable_status(self): - if getattr(Options.options,'no_tests',False): - return Task.SKIP_ME - ret=super(utest,self).runnable_status() - if ret==Task.SKIP_ME: - if getattr(Options.options,'all_tests',False): - return Task.RUN_ME - return ret - def add_path(self,dct,path,var): - dct[var]=os.pathsep.join(Utils.to_list(path)+[os.environ.get(var,'')]) - def get_test_env(self): - try: - fu=getattr(self.generator.bld,'all_test_paths') - except AttributeError: - fu=os.environ.copy() - lst=[] - for g in self.generator.bld.groups: - for tg in g: - if getattr(tg,'link_task',None): - s=tg.link_task.outputs[0].parent.abspath() - if s not in lst: - lst.append(s) - if Utils.is_win32: - self.add_path(fu,lst,'PATH') - elif Utils.unversioned_sys_platform()=='darwin': - self.add_path(fu,lst,'DYLD_LIBRARY_PATH') - self.add_path(fu,lst,'LD_LIBRARY_PATH') - else: - self.add_path(fu,lst,'LD_LIBRARY_PATH') - self.generator.bld.all_test_paths=fu - return fu - def run(self): - filename=self.inputs[0].abspath() - self.ut_exec=getattr(self.generator,'ut_exec',[filename]) - if getattr(self.generator,'ut_fun',None): - self.generator.ut_fun(self) - cwd=getattr(self.generator,'ut_cwd','')or self.inputs[0].parent.abspath() - testcmd=getattr(self.generator,'ut_cmd',False)or getattr(Options.options,'testcmd',False) - if testcmd: - 
self.ut_exec=(testcmd%self.ut_exec[0]).split(' ') - proc=Utils.subprocess.Popen(self.ut_exec,cwd=cwd,env=self.get_test_env(),stderr=Utils.subprocess.PIPE,stdout=Utils.subprocess.PIPE) - (stdout,stderr)=proc.communicate() - self.waf_unit_test_results=tup=(filename,proc.returncode,stdout,stderr) - testlock.acquire() - try: - return self.generator.add_test_results(tup) - finally: - testlock.release() - def post_run(self): - super(utest,self).post_run() - if getattr(Options.options,'clear_failed_tests',False)and self.waf_unit_test_results[1]: - self.generator.bld.task_sigs[self.uid()]=None -def summary(bld): - lst=getattr(bld,'utest_results',[]) - if lst: - Logs.pprint('CYAN','execution summary') - total=len(lst) - tfail=len([x for x in lst if x[1]]) - Logs.pprint('CYAN',' tests that pass %d/%d'%(total-tfail,total)) - for(f,code,out,err)in lst: - if not code: - Logs.pprint('CYAN',' %s'%f) - Logs.pprint('CYAN',' tests that fail %d/%d'%(tfail,total)) - for(f,code,out,err)in lst: - if code: - Logs.pprint('CYAN',' %s'%f) -def set_exit_code(bld): - lst=getattr(bld,'utest_results',[]) - for(f,code,out,err)in lst: - if code: - msg=[] - if out: - msg.append('stdout:%s%s'%(os.linesep,out.decode('utf-8'))) - if err: - msg.append('stderr:%s%s'%(os.linesep,err.decode('utf-8'))) - bld.fatal(os.linesep.join(msg)) -def options(opt): - opt.add_option('--notests',action='store_true',default=False,help='Exec no unit tests',dest='no_tests') - opt.add_option('--alltests',action='store_true',default=False,help='Exec all unit tests',dest='all_tests') - opt.add_option('--clear-failed',action='store_true',default=False,help='Force failed unit tests to run again next time',dest='clear_failed_tests') - opt.add_option('--testcmd',action='store',default=False,help='Run the unit tests using the test-cmd string'' example "--test-cmd="valgrind --error-exitcode=1'' %s" to run under valgrind',dest='testcmd') diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py 
b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py deleted file mode 100644 index a055887b..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/winres.py +++ /dev/null @@ -1,85 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import re,traceback -from waflib import Task,Logs,Utils -from waflib.TaskGen import extension -from waflib.Tools import c_preproc -@extension('.rc') -def rc_file(self,node): - obj_ext='.rc.o' - if self.env['WINRC_TGT_F']=='/fo': - obj_ext='.res' - rctask=self.create_task('winrc',node,node.change_ext(obj_ext)) - try: - self.compiled_tasks.append(rctask) - except AttributeError: - self.compiled_tasks=[rctask] -re_lines=re.compile('(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|''(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',re.IGNORECASE|re.MULTILINE) -class rc_parser(c_preproc.c_parser): - def filter_comments(self,filepath): - code=Utils.readf(filepath) - if c_preproc.use_trigraphs: - for(a,b)in c_preproc.trig_def:code=code.split(a).join(b) - code=c_preproc.re_nl.sub('',code) - code=c_preproc.re_cpp.sub(c_preproc.repl,code) - ret=[] - for m in re.finditer(re_lines,code): - if m.group(2): - ret.append((m.group(2),m.group(3))) - else: - ret.append(('include',m.group(5))) - return ret - def addlines(self,node): - self.currentnode_stack.append(node.parent) - filepath=node.abspath() - self.count_files+=1 - if self.count_files>c_preproc.recursion_limit: - raise c_preproc.PreprocError("recursion limit exceeded") - pc=self.parse_cache - Logs.debug('preproc: reading file %r',filepath) - try: - lns=pc[filepath] - except KeyError: - pass - else: - self.lines.extend(lns) - return - try: - lines=self.filter_comments(filepath) - lines.append((c_preproc.POPFILE,'')) - lines.reverse() - pc[filepath]=lines - 
self.lines.extend(lines) - except IOError: - raise c_preproc.PreprocError("could not read the file %s"%filepath) - except Exception: - if Logs.verbose>0: - Logs.error("parsing %s failed"%filepath) - traceback.print_exc() -class winrc(Task.Task): - run_str='${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}' - color='BLUE' - def scan(self): - tmp=rc_parser(self.generator.includes_nodes) - tmp.start(self.inputs[0],self.env) - nodes=tmp.nodes - names=tmp.names - if Logs.verbose: - Logs.debug('deps: deps for %s: %r; unresolved %r'%(str(self),nodes,names)) - return(nodes,names) -def configure(conf): - v=conf.env - v['WINRC_TGT_F']='-o' - v['WINRC_SRC_F']='-i' - if not conf.env.WINRC: - if v.CC_NAME=='msvc': - conf.find_program('RC',var='WINRC',path_list=v['PATH']) - v['WINRC_TGT_F']='/fo' - v['WINRC_SRC_F']='' - else: - conf.find_program('windres',var='WINRC',path_list=v['PATH']) - if not conf.env.WINRC: - conf.fatal('winrc was not found!') - v['WINRCFLAGS']=[] diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py deleted file mode 100644 index c56443b7..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlc.py +++ /dev/null @@ -1,43 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_xlc(conf): - cc=conf.find_program(['xlc_r','xlc'],var='CC') - conf.get_xlc_version(cc) - conf.env.CC_NAME='xlc' -@conf -def xlc_common_flags(conf): - v=conf.env - v['CC_SRC_F']=[] - v['CC_TGT_F']=['-c','-o'] - if not v['LINK_CC']:v['LINK_CC']=v['CC'] - v['CCLNK_SRC_F']=[] - v['CCLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['RPATH_ST']='-Wl,-rpath,%s' - v['SONAME_ST']=[] - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['LINKFLAGS_cprogram']=['-Wl,-brtl'] - v['cprogram_PATTERN']='%s' - v['CFLAGS_cshlib']=['-fPIC'] - v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull'] - v['cshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cstlib']=[] - v['cstlib_PATTERN']='lib%s.a' -def configure(conf): - conf.find_xlc() - conf.find_ar() - conf.xlc_common_flags() - conf.cc_load_tools() - conf.cc_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py deleted file mode 100644 index f348bbfd..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Tools/xlcxx.py +++ /dev/null @@ -1,43 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -from waflib.Tools import ccroot,ar -from waflib.Configure import conf -@conf -def find_xlcxx(conf): - cxx=conf.find_program(['xlc++_r','xlc++'],var='CXX') - conf.get_xlc_version(cxx) - conf.env.CXX_NAME='xlc++' -@conf -def xlcxx_common_flags(conf): - v=conf.env - v['CXX_SRC_F']=[] - v['CXX_TGT_F']=['-c','-o'] - if not v['LINK_CXX']:v['LINK_CXX']=v['CXX'] - v['CXXLNK_SRC_F']=[] - v['CXXLNK_TGT_F']=['-o'] - v['CPPPATH_ST']='-I%s' - v['DEFINES_ST']='-D%s' - v['LIB_ST']='-l%s' - v['LIBPATH_ST']='-L%s' - v['STLIB_ST']='-l%s' - v['STLIBPATH_ST']='-L%s' - v['RPATH_ST']='-Wl,-rpath,%s' - v['SONAME_ST']=[] - v['SHLIB_MARKER']=[] - v['STLIB_MARKER']=[] - v['LINKFLAGS_cxxprogram']=['-Wl,-brtl'] - v['cxxprogram_PATTERN']='%s' - v['CXXFLAGS_cxxshlib']=['-fPIC'] - v['LINKFLAGS_cxxshlib']=['-G','-Wl,-brtl,-bexpfull'] - v['cxxshlib_PATTERN']='lib%s.so' - v['LINKFLAGS_cxxstlib']=[] - v['cxxstlib_PATTERN']='lib%s.a' -def configure(conf): - conf.find_xlcxx() - conf.find_ar() - conf.xlcxx_common_flags() - conf.cxx_load_tools() - conf.cxx_add_flags() - conf.link_add_flags() diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py deleted file mode 100644 index b8706eca..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/Utils.py +++ /dev/null @@ -1,468 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,sys,errno,traceback,inspect,re,shutil,datetime,gc,platform -import subprocess -from collections import deque,defaultdict -try: - import _winreg as winreg -except ImportError: - try: - import winreg - except ImportError: - winreg=None -from waflib import Errors -try: - from collections import UserDict -except ImportError: - from UserDict import UserDict -try: - from hashlib import md5 -except ImportError: - try: - from md5 import md5 - except ImportError: - pass -try: - import threading -except ImportError: - if not'JOBS'in os.environ: - os.environ['JOBS']='1' - class threading(object): - pass - class Lock(object): - def acquire(self): - pass - def release(self): - pass - threading.Lock=threading.Thread=Lock -else: - run_old=threading.Thread.run - def run(*args,**kwargs): - try: - run_old(*args,**kwargs) - except(KeyboardInterrupt,SystemExit): - raise - except Exception: - sys.excepthook(*sys.exc_info()) - threading.Thread.run=run -SIG_NIL='iluvcuteoverload'.encode() -O644=420 -O755=493 -rot_chr=['\\','|','/','-'] -rot_idx=0 -try: - from collections import OrderedDict as ordered_iter_dict -except ImportError: - class ordered_iter_dict(dict): - def __init__(self,*k,**kw): - self.lst=[] - dict.__init__(self,*k,**kw) - def clear(self): - dict.clear(self) - self.lst=[] - def __setitem__(self,key,value): - dict.__setitem__(self,key,value) - try: - self.lst.remove(key) - except ValueError: - pass - self.lst.append(key) - def __delitem__(self,key): - dict.__delitem__(self,key) - try: - self.lst.remove(key) - except ValueError: - pass - def __iter__(self): - for x in self.lst: - yield x - def keys(self): - return self.lst -is_win32=os.sep=='\\'or sys.platform=='win32' -def readf(fname,m='r',encoding='ISO8859-1'): - if sys.hexversion>0x3000000 and not'b'in m: - m+='b' - f=open(fname,m) - try: - txt=f.read() - finally: - f.close() - if encoding: - txt=txt.decode(encoding) - else: - txt=txt.decode() - else: - 
f=open(fname,m) - try: - txt=f.read() - finally: - f.close() - return txt -def writef(fname,data,m='w',encoding='ISO8859-1'): - if sys.hexversion>0x3000000 and not'b'in m: - data=data.encode(encoding) - m+='b' - f=open(fname,m) - try: - f.write(data) - finally: - f.close() -def h_file(fname): - f=open(fname,'rb') - m=md5() - try: - while fname: - fname=f.read(200000) - m.update(fname) - finally: - f.close() - return m.digest() -def readf_win32(f,m='r',encoding='ISO8859-1'): - flags=os.O_NOINHERIT|os.O_RDONLY - if'b'in m: - flags|=os.O_BINARY - if'+'in m: - flags|=os.O_RDWR - try: - fd=os.open(f,flags) - except OSError: - raise IOError('Cannot read from %r'%f) - if sys.hexversion>0x3000000 and not'b'in m: - m+='b' - f=os.fdopen(fd,m) - try: - txt=f.read() - finally: - f.close() - if encoding: - txt=txt.decode(encoding) - else: - txt=txt.decode() - else: - f=os.fdopen(fd,m) - try: - txt=f.read() - finally: - f.close() - return txt -def writef_win32(f,data,m='w',encoding='ISO8859-1'): - if sys.hexversion>0x3000000 and not'b'in m: - data=data.encode(encoding) - m+='b' - flags=os.O_CREAT|os.O_TRUNC|os.O_WRONLY|os.O_NOINHERIT - if'b'in m: - flags|=os.O_BINARY - if'+'in m: - flags|=os.O_RDWR - try: - fd=os.open(f,flags) - except OSError: - raise IOError('Cannot write to %r'%f) - f=os.fdopen(fd,m) - try: - f.write(data) - finally: - f.close() -def h_file_win32(fname): - try: - fd=os.open(fname,os.O_BINARY|os.O_RDONLY|os.O_NOINHERIT) - except OSError: - raise IOError('Cannot read from %r'%fname) - f=os.fdopen(fd,'rb') - m=md5() - try: - while fname: - fname=f.read(200000) - m.update(fname) - finally: - f.close() - return m.digest() -readf_unix=readf -writef_unix=writef -h_file_unix=h_file -if hasattr(os,'O_NOINHERIT')and sys.hexversion<0x3040000: - readf=readf_win32 - writef=writef_win32 - h_file=h_file_win32 -try: - x=''.encode('hex') -except LookupError: - import binascii - def to_hex(s): - ret=binascii.hexlify(s) - if not isinstance(ret,str): - ret=ret.decode('utf-8') - 
return ret -else: - def to_hex(s): - return s.encode('hex') -to_hex.__doc__=""" -Return the hexadecimal representation of a string - -:param s: string to convert -:type s: string -""" -def listdir_win32(s): - if not s: - try: - import ctypes - except ImportError: - return[x+':\\'for x in list('ABCDEFGHIJKLMNOPQRSTUVWXYZ')] - else: - dlen=4 - maxdrives=26 - buf=ctypes.create_string_buffer(maxdrives*dlen) - ndrives=ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen,ctypes.byref(buf)) - return[str(buf.raw[4*i:4*i+2].decode('ascii'))for i in range(int(ndrives/dlen))] - if len(s)==2 and s[1]==":": - s+=os.sep - if not os.path.isdir(s): - e=OSError('%s is not a directory'%s) - e.errno=errno.ENOENT - raise e - return os.listdir(s) -listdir=os.listdir -if is_win32: - listdir=listdir_win32 -def num2ver(ver): - if isinstance(ver,str): - ver=tuple(ver.split('.')) - if isinstance(ver,tuple): - ret=0 - for i in range(4): - if i0x3000000: - ret=ret.encode('iso8859-1','xmlcharrefreplace') - return ret -reg_subst=re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}") -def subst_vars(expr,params): - def repl_var(m): - if m.group(1): - return'\\' - if m.group(2): - return'$' - try: - return params.get_flat(m.group(3)) - except AttributeError: - return params[m.group(3)] - return reg_subst.sub(repl_var,expr) -def destos_to_binfmt(key): - if key=='darwin': - return'mac-o' - elif key in('win32','cygwin','uwin','msys'): - return'pe' - return'elf' -def unversioned_sys_platform(): - s=sys.platform - if s.startswith('java'): - from java.lang import System - s=System.getProperty('os.name') - if s=='Mac OS X': - return'darwin' - elif s.startswith('Windows '): - return'win32' - elif s=='OS/2': - return'os2' - elif s=='HP-UX': - return'hp-ux' - elif s in('SunOS','Solaris'): - return'sunos' - else:s=s.lower() - if s=='powerpc': - return'darwin' - if s=='win32'or s=='os2': - return s - if s=='cli'and os.name=='nt': - return'win32' - return re.split('\d+$',s)[0] -def nada(*k,**kw): - pass -class 
Timer(object): - def __init__(self): - self.start_time=datetime.datetime.utcnow() - def __str__(self): - delta=datetime.datetime.utcnow()-self.start_time - days=delta.days - hours,rem=divmod(delta.seconds,3600) - minutes,seconds=divmod(rem,60) - seconds+=delta.microseconds*1e-6 - result='' - if days: - result+='%dd'%days - if days or hours: - result+='%dh'%hours - if days or hours or minutes: - result+='%dm'%minutes - return'%s%.3fs'%(result,seconds) -if is_win32: - old=shutil.copy2 - def copy2(src,dst): - old(src,dst) - shutil.copystat(src,dst) - setattr(shutil,'copy2',copy2) -if os.name=='java': - try: - gc.disable() - gc.enable() - except NotImplementedError: - gc.disable=gc.enable -def read_la_file(path): - sp=re.compile(r'^([^=]+)=\'(.*)\'$') - dc={} - for line in readf(path).splitlines(): - try: - _,left,right,_=sp.split(line.strip()) - dc[left]=right - except ValueError: - pass - return dc -def nogc(fun): - def f(*k,**kw): - try: - gc.disable() - ret=fun(*k,**kw) - finally: - gc.enable() - return ret - f.__doc__=fun.__doc__ - return f -def run_once(fun): - cache={} - def wrap(k): - try: - return cache[k] - except KeyError: - ret=fun(k) - cache[k]=ret - return ret - wrap.__cache__=cache - wrap.__name__=fun.__name__ - return wrap -def get_registry_app_path(key,filename): - if not winreg: - return None - try: - result=winreg.QueryValue(key,"Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe"%filename[0]) - except WindowsError: - pass - else: - if os.path.isfile(result): - return result -def lib64(): - if os.sep=='/': - if platform.architecture()[0]=='64bit': - if os.path.exists('/usr/lib64')and not os.path.exists('/usr/lib32'): - return'64' - return'' -def sane_path(p): - return os.path.abspath(os.path.expanduser(p)) diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py deleted file mode 100644 index 55e850d6..00000000 --- 
a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py deleted file mode 100644 index 1d8bc78f..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/ansiterm.py +++ /dev/null @@ -1,238 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os,re,sys -from waflib import Utils -wlock=Utils.threading.Lock() -try: - from ctypes import Structure,windll,c_short,c_ushort,c_ulong,c_int,byref,c_wchar,POINTER,c_long -except ImportError: - class AnsiTerm(object): - def __init__(self,stream): - self.stream=stream - try: - self.errors=self.stream.errors - except AttributeError: - pass - self.encoding=self.stream.encoding - def write(self,txt): - try: - wlock.acquire() - self.stream.write(txt) - self.stream.flush() - finally: - wlock.release() - def fileno(self): - return self.stream.fileno() - def flush(self): - self.stream.flush() - def isatty(self): - return self.stream.isatty() -else: - class COORD(Structure): - _fields_=[("X",c_short),("Y",c_short)] - class SMALL_RECT(Structure): - _fields_=[("Left",c_short),("Top",c_short),("Right",c_short),("Bottom",c_short)] - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - _fields_=[("Size",COORD),("CursorPosition",COORD),("Attributes",c_ushort),("Window",SMALL_RECT),("MaximumWindowSize",COORD)] - class CONSOLE_CURSOR_INFO(Structure): - _fields_=[('dwSize',c_ulong),('bVisible',c_int)] - try: - _type=unicode - except NameError: - _type=str - to_int=lambda number,default:number and int(number)or default - STD_OUTPUT_HANDLE=-11 - STD_ERROR_HANDLE=-12 - windll.kernel32.GetStdHandle.argtypes=[c_ulong] - 
windll.kernel32.GetStdHandle.restype=c_ulong - windll.kernel32.GetConsoleScreenBufferInfo.argtypes=[c_ulong,POINTER(CONSOLE_SCREEN_BUFFER_INFO)] - windll.kernel32.GetConsoleScreenBufferInfo.restype=c_long - windll.kernel32.SetConsoleTextAttribute.argtypes=[c_ulong,c_ushort] - windll.kernel32.SetConsoleTextAttribute.restype=c_long - windll.kernel32.FillConsoleOutputCharacterW.argtypes=[c_ulong,c_wchar,c_ulong,POINTER(COORD),POINTER(c_ulong)] - windll.kernel32.FillConsoleOutputCharacterW.restype=c_long - windll.kernel32.FillConsoleOutputAttribute.argtypes=[c_ulong,c_ushort,c_ulong,POINTER(COORD),POINTER(c_ulong)] - windll.kernel32.FillConsoleOutputAttribute.restype=c_long - windll.kernel32.SetConsoleCursorPosition.argtypes=[c_ulong,POINTER(COORD)] - windll.kernel32.SetConsoleCursorPosition.restype=c_long - windll.kernel32.SetConsoleCursorInfo.argtypes=[c_ulong,POINTER(CONSOLE_CURSOR_INFO)] - windll.kernel32.SetConsoleCursorInfo.restype=c_long - class AnsiTerm(object): - def __init__(self,s): - self.stream=s - try: - self.errors=s.errors - except AttributeError: - pass - self.encoding=s.encoding - self.cursor_history=[] - handle=(s.fileno()==2)and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE - self.hconsole=windll.kernel32.GetStdHandle(handle) - self._sbinfo=CONSOLE_SCREEN_BUFFER_INFO() - self._csinfo=CONSOLE_CURSOR_INFO() - windll.kernel32.GetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) - self._orig_sbinfo=CONSOLE_SCREEN_BUFFER_INFO() - r=windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._orig_sbinfo)) - self._isatty=r==1 - def screen_buffer_info(self): - windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole,byref(self._sbinfo)) - return self._sbinfo - def clear_line(self,param): - mode=param and int(param)or 0 - sbinfo=self.screen_buffer_info() - if mode==1: - line_start=COORD(0,sbinfo.CursorPosition.Y) - line_length=sbinfo.Size.X - elif mode==2: - line_start=COORD(sbinfo.CursorPosition.X,sbinfo.CursorPosition.Y) - 
line_length=sbinfo.Size.X-sbinfo.CursorPosition.X - else: - line_start=sbinfo.CursorPosition - line_length=sbinfo.Size.X-sbinfo.CursorPosition.X - chars_written=c_ulong() - windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),line_length,line_start,byref(chars_written)) - windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,line_length,line_start,byref(chars_written)) - def clear_screen(self,param): - mode=to_int(param,0) - sbinfo=self.screen_buffer_info() - if mode==1: - clear_start=COORD(0,0) - clear_length=sbinfo.CursorPosition.X*sbinfo.CursorPosition.Y - elif mode==2: - clear_start=COORD(0,0) - clear_length=sbinfo.Size.X*sbinfo.Size.Y - windll.kernel32.SetConsoleCursorPosition(self.hconsole,clear_start) - else: - clear_start=sbinfo.CursorPosition - clear_length=((sbinfo.Size.X-sbinfo.CursorPosition.X)+sbinfo.Size.X*(sbinfo.Size.Y-sbinfo.CursorPosition.Y)) - chars_written=c_ulong() - windll.kernel32.FillConsoleOutputCharacterW(self.hconsole,c_wchar(' '),clear_length,clear_start,byref(chars_written)) - windll.kernel32.FillConsoleOutputAttribute(self.hconsole,sbinfo.Attributes,clear_length,clear_start,byref(chars_written)) - def push_cursor(self,param): - sbinfo=self.screen_buffer_info() - self.cursor_history.append(sbinfo.CursorPosition) - def pop_cursor(self,param): - if self.cursor_history: - old_pos=self.cursor_history.pop() - windll.kernel32.SetConsoleCursorPosition(self.hconsole,old_pos) - def set_cursor(self,param): - y,sep,x=param.partition(';') - x=to_int(x,1)-1 - y=to_int(y,1)-1 - sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,x),sbinfo.Size.X),min(max(0,y),sbinfo.Size.Y)) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def set_column(self,param): - x=to_int(param,1)-1 - sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,x),sbinfo.Size.X),sbinfo.CursorPosition.Y) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def move_cursor(self,x_offset=0,y_offset=0): - 
sbinfo=self.screen_buffer_info() - new_pos=COORD(min(max(0,sbinfo.CursorPosition.X+x_offset),sbinfo.Size.X),min(max(0,sbinfo.CursorPosition.Y+y_offset),sbinfo.Size.Y)) - windll.kernel32.SetConsoleCursorPosition(self.hconsole,new_pos) - def move_up(self,param): - self.move_cursor(y_offset=-to_int(param,1)) - def move_down(self,param): - self.move_cursor(y_offset=to_int(param,1)) - def move_left(self,param): - self.move_cursor(x_offset=-to_int(param,1)) - def move_right(self,param): - self.move_cursor(x_offset=to_int(param,1)) - def next_line(self,param): - sbinfo=self.screen_buffer_info() - self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=to_int(param,1)) - def prev_line(self,param): - sbinfo=self.screen_buffer_info() - self.move_cursor(x_offset=-sbinfo.CursorPosition.X,y_offset=-to_int(param,1)) - def rgb2bgr(self,c): - return((c&1)<<2)|(c&2)|((c&4)>>2) - def set_color(self,param): - cols=param.split(';') - sbinfo=self.screen_buffer_info() - attr=sbinfo.Attributes - for c in cols: - c=to_int(c,0) - if 29>4)|((attr&0x07)<<4) - windll.kernel32.SetConsoleTextAttribute(self.hconsole,attr) - def show_cursor(self,param): - self._csinfo.bVisible=1 - windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) - def hide_cursor(self,param): - self._csinfo.bVisible=0 - windll.kernel32.SetConsoleCursorInfo(self.hconsole,byref(self._csinfo)) - ansi_command_table={'A':move_up,'B':move_down,'C':move_right,'D':move_left,'E':next_line,'F':prev_line,'G':set_column,'H':set_cursor,'f':set_cursor,'J':clear_screen,'K':clear_line,'h':show_cursor,'l':hide_cursor,'m':set_color,'s':push_cursor,'u':pop_cursor,} - ansi_tokens=re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))') - def write(self,text): - try: - wlock.acquire() - if self._isatty: - for param,cmd,txt in self.ansi_tokens.findall(text): - if cmd: - cmd_func=self.ansi_command_table.get(cmd) - if cmd_func: - cmd_func(self,param) - else: - self.writeconsole(txt) - else: - self.stream.write(text) - finally: 
- wlock.release() - def writeconsole(self,txt): - chars_written=c_ulong() - writeconsole=windll.kernel32.WriteConsoleA - if isinstance(txt,_type): - writeconsole=windll.kernel32.WriteConsoleW - done=0 - todo=len(txt) - chunk=32<<10 - while todo!=0: - doing=min(chunk,todo) - buf=txt[done:done+doing] - r=writeconsole(self.hconsole,buf,doing,byref(chars_written),None) - if r==0: - chunk>>=1 - continue - done+=doing - todo-=doing - def fileno(self): - return self.stream.fileno() - def flush(self): - pass - def isatty(self): - return self._isatty - if sys.stdout.isatty()or sys.stderr.isatty(): - handle=sys.stdout.isatty()and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE - console=windll.kernel32.GetStdHandle(handle) - sbinfo=CONSOLE_SCREEN_BUFFER_INFO() - def get_term_cols(): - windll.kernel32.GetConsoleScreenBufferInfo(console,byref(sbinfo)) - return sbinfo.Size.X-1 -try: - import struct,fcntl,termios -except ImportError: - pass -else: - if(sys.stdout.isatty()or sys.stderr.isatty())and os.environ.get('TERM','')not in('dumb','emacs'): - FD=sys.stdout.isatty()and sys.stdout.fileno()or sys.stderr.fileno() - def fun(): - return struct.unpack("HHHH",fcntl.ioctl(FD,termios.TIOCGWINSZ,struct.pack("HHHH",0,0,0,0)))[1] - try: - fun() - except Exception as e: - pass - else: - get_term_cols=fun diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py deleted file mode 100644 index 55e850d6..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! 
https://waf.io/book/index.html#_obtaining_the_waf_file - diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py deleted file mode 100644 index fd541452..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/extras/compat15.py +++ /dev/null @@ -1,301 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import sys -from waflib import ConfigSet,Logs,Options,Scripting,Task,Build,Configure,Node,Runner,TaskGen,Utils,Errors,Context -sys.modules['Environment']=ConfigSet -ConfigSet.Environment=ConfigSet.ConfigSet -sys.modules['Logs']=Logs -sys.modules['Options']=Options -sys.modules['Scripting']=Scripting -sys.modules['Task']=Task -sys.modules['Build']=Build -sys.modules['Configure']=Configure -sys.modules['Node']=Node -sys.modules['Runner']=Runner -sys.modules['TaskGen']=TaskGen -sys.modules['Utils']=Utils -sys.modules['Constants']=Context -Context.SRCDIR='' -Context.BLDDIR='' -from waflib.Tools import c_preproc -sys.modules['preproc']=c_preproc -from waflib.Tools import c_config -sys.modules['config_c']=c_config -ConfigSet.ConfigSet.copy=ConfigSet.ConfigSet.derive -ConfigSet.ConfigSet.set_variant=Utils.nada -Utils.pproc=Utils.subprocess -Build.BuildContext.add_subdirs=Build.BuildContext.recurse -Build.BuildContext.new_task_gen=Build.BuildContext.__call__ -Build.BuildContext.is_install=0 -Node.Node.relpath_gen=Node.Node.path_from -Utils.pproc=Utils.subprocess -Utils.get_term_cols=Logs.get_term_cols -def cmd_output(cmd,**kw): - silent=False - if'silent'in kw: - silent=kw['silent'] - del(kw['silent']) - if'e'in kw: - tmp=kw['e'] - del(kw['e']) - kw['env']=tmp - kw['shell']=isinstance(cmd,str) - kw['stdout']=Utils.subprocess.PIPE - if silent: - kw['stderr']=Utils.subprocess.PIPE - try: - p=Utils.subprocess.Popen(cmd,**kw) - output=p.communicate()[0] - except OSError as e: - raise 
ValueError(str(e)) - if p.returncode: - if not silent: - msg="command execution failed: %s -> %r"%(cmd,str(output)) - raise ValueError(msg) - output='' - return output -Utils.cmd_output=cmd_output -def name_to_obj(self,s,env=None): - if Logs.verbose: - Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"') - return self.get_tgen_by_name(s) -Build.BuildContext.name_to_obj=name_to_obj -def env_of_name(self,name): - try: - return self.all_envs[name] - except KeyError: - Logs.error('no such environment: '+name) - return None -Build.BuildContext.env_of_name=env_of_name -def set_env_name(self,name,env): - self.all_envs[name]=env - return env -Configure.ConfigurationContext.set_env_name=set_env_name -def retrieve(self,name,fromenv=None): - try: - env=self.all_envs[name] - except KeyError: - env=ConfigSet.ConfigSet() - self.prepare_env(env) - self.all_envs[name]=env - else: - if fromenv: - Logs.warn("The environment %s may have been configured already"%name) - return env -Configure.ConfigurationContext.retrieve=retrieve -Configure.ConfigurationContext.sub_config=Configure.ConfigurationContext.recurse -Configure.ConfigurationContext.check_tool=Configure.ConfigurationContext.load -Configure.conftest=Configure.conf -Configure.ConfigurationError=Errors.ConfigurationError -Utils.WafError=Errors.WafError -Options.OptionsContext.sub_options=Options.OptionsContext.recurse -Options.OptionsContext.tool_options=Context.Context.load -Options.Handler=Options.OptionsContext -Task.simple_task_type=Task.task_type_from_func=Task.task_factory -Task.TaskBase.classes=Task.classes -def setitem(self,key,value): - if key.startswith('CCFLAGS'): - key=key[1:] - self.table[key]=value -ConfigSet.ConfigSet.__setitem__=setitem -@TaskGen.feature('d') -@TaskGen.before('apply_incpaths') -def old_importpaths(self): - if getattr(self,'importpaths',[]): - self.includes=self.importpaths -from waflib import Context -eld=Context.load_tool -def load_tool(*k,**kw): - ret=eld(*k,**kw) - 
if'set_options'in ret.__dict__: - if Logs.verbose: - Logs.warn('compat: rename "set_options" to options') - ret.options=ret.set_options - if'detect'in ret.__dict__: - if Logs.verbose: - Logs.warn('compat: rename "detect" to "configure"') - ret.configure=ret.detect - return ret -Context.load_tool=load_tool -def get_curdir(self): - return self.path.abspath() -Context.Context.curdir=property(get_curdir,Utils.nada) -def get_srcdir(self): - return self.srcnode.abspath() -Configure.ConfigurationContext.srcdir=property(get_srcdir,Utils.nada) -def get_blddir(self): - return self.bldnode.abspath() -Configure.ConfigurationContext.blddir=property(get_blddir,Utils.nada) -Configure.ConfigurationContext.check_message_1=Configure.ConfigurationContext.start_msg -Configure.ConfigurationContext.check_message_2=Configure.ConfigurationContext.end_msg -rev=Context.load_module -def load_module(path,encoding=None): - ret=rev(path,encoding) - if'set_options'in ret.__dict__: - if Logs.verbose: - Logs.warn('compat: rename "set_options" to "options" (%r)'%path) - ret.options=ret.set_options - if'srcdir'in ret.__dict__: - if Logs.verbose: - Logs.warn('compat: rename "srcdir" to "top" (%r)'%path) - ret.top=ret.srcdir - if'blddir'in ret.__dict__: - if Logs.verbose: - Logs.warn('compat: rename "blddir" to "out" (%r)'%path) - ret.out=ret.blddir - Utils.g_module=Context.g_module - Options.launch_dir=Context.launch_dir - return ret -Context.load_module=load_module -old_post=TaskGen.task_gen.post -def post(self): - self.features=self.to_list(self.features) - if'cc'in self.features: - if Logs.verbose: - Logs.warn('compat: the feature cc does not exist anymore (use "c")') - self.features.remove('cc') - self.features.append('c') - if'cstaticlib'in self.features: - if Logs.verbose: - Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")') - self.features.remove('cstaticlib') - self.features.append(('cxx'in self.features)and'cxxstlib'or'cstlib') - if 
getattr(self,'ccflags',None): - if Logs.verbose: - Logs.warn('compat: "ccflags" was renamed to "cflags"') - self.cflags=self.ccflags - return old_post(self) -TaskGen.task_gen.post=post -def waf_version(*k,**kw): - Logs.warn('wrong version (waf_version was removed in waf 1.6)') -Utils.waf_version=waf_version -import os -@TaskGen.feature('c','cxx','d') -@TaskGen.before('apply_incpaths','propagate_uselib_vars') -@TaskGen.after('apply_link','process_source') -def apply_uselib_local(self): - env=self.env - from waflib.Tools.ccroot import stlink_task - self.uselib=self.to_list(getattr(self,'uselib',[])) - self.includes=self.to_list(getattr(self,'includes',[])) - names=self.to_list(getattr(self,'uselib_local',[])) - get=self.bld.get_tgen_by_name - seen=set([]) - seen_uselib=set([]) - tmp=Utils.deque(names) - if tmp: - if Logs.verbose: - Logs.warn('compat: "uselib_local" is deprecated, replace by "use"') - while tmp: - lib_name=tmp.popleft() - if lib_name in seen: - continue - y=get(lib_name) - y.post() - seen.add(lib_name) - if getattr(y,'uselib_local',None): - for x in self.to_list(getattr(y,'uselib_local',[])): - obj=get(x) - obj.post() - if getattr(obj,'link_task',None): - if not isinstance(obj.link_task,stlink_task): - tmp.append(x) - if getattr(y,'link_task',None): - link_name=y.target[y.target.rfind(os.sep)+1:] - if isinstance(y.link_task,stlink_task): - env.append_value('STLIB',[link_name]) - else: - env.append_value('LIB',[link_name]) - self.link_task.set_run_after(y.link_task) - self.link_task.dep_nodes+=y.link_task.outputs - tmp_path=y.link_task.outputs[0].parent.bldpath() - if not tmp_path in env['LIBPATH']: - env.prepend_value('LIBPATH',[tmp_path]) - for v in self.to_list(getattr(y,'uselib',[])): - if v not in seen_uselib: - seen_uselib.add(v) - if not env['STLIB_'+v]: - if not v in self.uselib: - self.uselib.insert(0,v) - if getattr(y,'export_includes',None): - self.includes.extend(y.to_incnodes(y.export_includes)) 
-@TaskGen.feature('cprogram','cxxprogram','cstlib','cxxstlib','cshlib','cxxshlib','dprogram','dstlib','dshlib') -@TaskGen.after('apply_link') -def apply_objdeps(self): - names=getattr(self,'add_objects',[]) - if not names: - return - names=self.to_list(names) - get=self.bld.get_tgen_by_name - seen=[] - while names: - x=names[0] - if x in seen: - names=names[1:] - continue - y=get(x) - if getattr(y,'add_objects',None): - added=0 - lst=y.to_list(y.add_objects) - lst.reverse() - for u in lst: - if u in seen:continue - added=1 - names=[u]+names - if added:continue - y.post() - seen.append(x) - for t in getattr(y,'compiled_tasks',[]): - self.link_task.inputs.extend(t.outputs) -@TaskGen.after('apply_link') -def process_obj_files(self): - if not hasattr(self,'obj_files'): - return - for x in self.obj_files: - node=self.path.find_resource(x) - self.link_task.inputs.append(node) -@TaskGen.taskgen_method -def add_obj_file(self,file): - if not hasattr(self,'obj_files'):self.obj_files=[] - if not'process_obj_files'in self.meths:self.meths.append('process_obj_files') - self.obj_files.append(file) -old_define=Configure.ConfigurationContext.__dict__['define'] -@Configure.conf -def define(self,key,val,quote=True,comment=''): - old_define(self,key,val,quote,comment) - if key.startswith('HAVE_'): - self.env[key]=1 -old_undefine=Configure.ConfigurationContext.__dict__['undefine'] -@Configure.conf -def undefine(self,key,comment=''): - old_undefine(self,key,comment) - if key.startswith('HAVE_'): - self.env[key]=0 -def set_incdirs(self,val): - Logs.warn('compat: change "export_incdirs" by "export_includes"') - self.export_includes=val -TaskGen.task_gen.export_incdirs=property(None,set_incdirs) -def install_dir(self,path): - if not path: - return[] - destpath=Utils.subst_vars(path,self.env) - if self.is_install>0: - Logs.info('* creating %s'%destpath) - Utils.check_dir(destpath) - elif self.is_install<0: - Logs.info('* removing %s'%destpath) - try: - os.remove(destpath) - except OSError: 
- pass -Build.BuildContext.install_dir=install_dir -repl={'apply_core':'process_source','apply_lib_vars':'process_source','apply_obj_vars':'propagate_uselib_vars','exec_rule':'process_rule'} -def after(*k): - k=[repl.get(key,key)for key in k] - return TaskGen.after_method(*k) -def before(*k): - k=[repl.get(key,key)for key in k] - return TaskGen.before_method(*k) -TaskGen.before=before diff --git a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py b/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py deleted file mode 100644 index 1721c353..00000000 --- a/.waf3-1.8.18-4bb74d2af0d005ad4420ee36f19f050a/waflib/fixpy2.py +++ /dev/null @@ -1,53 +0,0 @@ -#! /usr/bin/env python -# encoding: utf-8 -# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file - -import os -all_modifs={} -def fixdir(dir): - global all_modifs - for k in all_modifs: - for v in all_modifs[k]: - modif(os.path.join(dir,'waflib'),k,v) -def modif(dir,name,fun): - if name=='*': - lst=[] - for y in'. 
Tools extras'.split(): - for x in os.listdir(os.path.join(dir,y)): - if x.endswith('.py'): - lst.append(y+os.sep+x) - for x in lst: - modif(dir,x,fun) - return - filename=os.path.join(dir,name) - f=open(filename,'r') - try: - txt=f.read() - finally: - f.close() - txt=fun(txt) - f=open(filename,'w') - try: - f.write(txt) - finally: - f.close() -def subst(*k): - def do_subst(fun): - global all_modifs - for x in k: - try: - all_modifs[x].append(fun) - except KeyError: - all_modifs[x]=[fun] - return fun - return do_subst -@subst('*') -def r1(code): - code=code.replace('as e:',',e:') - code=code.replace(".decode(sys.stdout.encoding or 'iso8859-1')",'') - code=code.replace('.encode()','') - return code -@subst('Runner.py') -def r4(code): - code=code.replace('next(self.biter)','self.biter.next()') - return code From 2ea4a5d947ca798943a54a6586db38fb39ec57e5 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Wed, 20 Mar 2019 11:29:24 +0100 Subject: [PATCH 06/22] get_speed with bulk_read implemented for MX,XM,XL models --- src/dynamixel/servos/servo.hpp | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index e5a1cf08..3ee3f1c5 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -372,19 +372,45 @@ namespace dynamixel { // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same template - static InstructionPacket get_current_speed(const std::vector& ids) + static InstructionPacket get_current_speed_MX(const std::vector& ids) + { + //std::vector address; + std::vector address; //uint8_t + std::vector data_length; //uint8_t + for (size_t i = 0; i < ids.size(); i++) { + address.push_back(0x26); // 0x27 for XL and 0x26 for MX + data_length.push_back(0x02); + } + return bulk_read_t(_get_typed(ids), address, data_length); + } + + // Bulk operations. 
Only works for MX models with protocol 1. Only works if the models are known and they are all the same + template + static InstructionPacket get_current_speed_XM(const std::vector& ids) { //std::vector address; std::vector address; //uint8_t + std::vector data_length; //uint8_t + for (size_t i = 0; i < ids.size(); i++) { + address.push_back(0x80); // 0x80 for XM + data_length.push_back(0x04); + } + return bulk_read_t(_get_typed(ids), address, data_length); + } + // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same + template + static InstructionPacket get_current_speed_XL(const std::vector& ids) + { + //std::vector address; + std::vector address; //uint8_t std::vector data_length; //uint8_t for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x27); // 0x27 for XL and 0x26 for MX + address.push_back(0x27); // 0x27 for XL data_length.push_back(0x02); } return bulk_read_t(_get_typed(ids), address, data_length); } - // ================================================================= // Torque-specific From 27f1c4c9db5c6d9f6a0863447479541ad1dd4b04 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 26 Mar 2019 14:50:40 +0100 Subject: [PATCH 07/22] bulk_write for protocol 2 only added --- src/dynamixel/instructions/bulk_write.hpp | 50 +++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 src/dynamixel/instructions/bulk_write.hpp diff --git a/src/dynamixel/instructions/bulk_write.hpp b/src/dynamixel/instructions/bulk_write.hpp new file mode 100644 index 00000000..ad706dce --- /dev/null +++ b/src/dynamixel/instructions/bulk_write.hpp @@ -0,0 +1,50 @@ +#ifndef DYNAMIXEL_INSTRUCTIONS_BULK_WRITE_HPP_ +#define DYNAMIXEL_INSTRUCTIONS_BULK_WRITE_HPP_ + +#include +#include + +#include "../errors/error.hpp" +#include "../instruction_packet.hpp" + +namespace dynamixel { + namespace instructions { + template + class BulkWrite : public InstructionPacket { + public: + // Only 
for protocol 2 + BulkWrite(const std::vector& ids, const typename T::address_t& address, + const uint16_t& data_length, const std::vector>& data) + : InstructionPacket(T::broadcast_id, T::Instructions::bulk_write, _get_parameters(ids, address, data_length, data)) {} + + protected: + std::vector _get_parameters(const std::vector& ids, const uint16_t& address, + const uint16_t& data_length, const std::vector>& data) + { + if (ids.size() == 0) + throw errors::Error("BulkWrite: ids vector of size zero"); + + std::vector parameters((5 + data_length) * ids.size() + 5); + size_t curr = 0; + + for (size_t m = 0; m < ids.size(); m++) { + parameters[curr++] = ids[m]; + parameters[curr++] = (uint8_t)(address & 0xFF); + parameters[curr++] = (uint8_t)(address >> 8) & 0xFF; + parameters[curr++] = (uint8_t)(data_length & 0xFF); + parameters[curr++] = (uint8_t)(data_length >> 8) & 0xFF; + if (data[m].size() != data_length) + throw errors::Error("BulkWrite: mismatch between declared" + "data length and data vector size"); + for (size_t i = 0; i < data[m].size(); ++i) { + parameters[curr++] = data[m][i]; + } + } + + return parameters; + } + }; + } // namespace instructions +} // namespace dynamixel + +#endif From 78cd3e889f72fff4ac21436a73c06add0a02b3cb Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 26 Mar 2019 14:51:04 +0100 Subject: [PATCH 08/22] sync_read for protocol 2 only added --- src/dynamixel/instructions/sync_read.hpp | 44 ++++++++++++++++++++++++ 1 file changed, 44 insertions(+) create mode 100644 src/dynamixel/instructions/sync_read.hpp diff --git a/src/dynamixel/instructions/sync_read.hpp b/src/dynamixel/instructions/sync_read.hpp new file mode 100644 index 00000000..84475d90 --- /dev/null +++ b/src/dynamixel/instructions/sync_read.hpp @@ -0,0 +1,44 @@ +#ifndef DYNAMIXEL_INSTRUCTIONS_SYNC_READ_HPP_ +#define DYNAMIXEL_INSTRUCTIONS_SYNC_READ_HPP_ + +#include +#include + +#include "../errors/error.hpp" +#include "../instruction_packet.hpp" + +namespace 
dynamixel { + namespace instructions { + template + class SyncRead : public InstructionPacket { + public: + SyncRead(typename T::address_t address, const std::vector& ids, uint16_t data_length) + : InstructionPacket(T::broadcast_id, T::Instructions::sync_read, _get_parameters(address, ids, data_length)) {} + + protected: + std::vector _get_parameters(uint16_t address, const std::vector& ids, + uint16_t data_length) + { + if (ids.size() == 0) + throw errors::Error("SyncRead: ids vector of size zero"); + + std::vector parameters(ids.size() + 9); + + parameters[0] = (uint8_t)(address[m] & 0xFF); + parameters[1] = (uint8_t)(address[m] >> 8) & 0xFF; + parameters[2] = (uint8_t)(data_length[m] & 0xFF); + parameters[3] = (uint8_t)(data_length[m] >> 8) & 0xFF; + + size_t curr = 4; + + for (size_t i = 0; i < ids.size(); ++i) { + parameters[curr++] = ids[i]; + } + + return parameters; + } + }; + } // namespace instructions +} // namespace dynamixel + +#endif From f81be4c062d949b99af0e02f9093e333d0c79bd7 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 26 Mar 2019 14:54:50 +0100 Subject: [PATCH 09/22] baudrate list updated, operating mode list updated --- src/dynamixel/baudrate.hpp | 12 +++++++++--- src/dynamixel/instructions/bulk_read.hpp | 2 +- src/dynamixel/operating_mode.hpp | 7 +++++++ src/dynamixel/servos/servo.hpp | 4 ++++ 4 files changed, 21 insertions(+), 4 deletions(-) diff --git a/src/dynamixel/baudrate.hpp b/src/dynamixel/baudrate.hpp index 18d4c7c0..a83e44e6 100644 --- a/src/dynamixel/baudrate.hpp +++ b/src/dynamixel/baudrate.hpp @@ -1,11 +1,11 @@ #ifndef DYNAMIXEL_BAUDRATE_HPP_ #define DYNAMIXEL_BAUDRATE_HPP_ -#include #include +#include -#include "protocols.hpp" #include "errors/error.hpp" +#include "protocols.hpp" namespace dynamixel { /** The template specifications of this method are used to set the baudrate @@ -45,6 +45,12 @@ namespace dynamixel { return 3; case 1000000: return 1; + case 2250000: + return 250; + case 2500000: + return 251; + case 
3000000: + return 252; default: std::stringstream err_message; err_message << "Invalid baudrate for protocol 1: " << baudrate; @@ -80,6 +86,6 @@ namespace dynamixel { throw errors::Error(err_message.str()); } } -} +} // namespace dynamixel #endif diff --git a/src/dynamixel/instructions/bulk_read.hpp b/src/dynamixel/instructions/bulk_read.hpp index 8891bfd9..5d150a3f 100644 --- a/src/dynamixel/instructions/bulk_read.hpp +++ b/src/dynamixel/instructions/bulk_read.hpp @@ -116,7 +116,7 @@ namespace dynamixel { std::vector lengths) { if (ids.size() == 0) - throw errors::Error("SyncWrite: ids vector of size zero"); + throw errors::Error("BulkRead: ids vector of size zero"); if (ids.size() != addresses.size()) throw errors::Error("BulkRead: mismatching size for ids and addresses"); if (ids.size() != lengths.size()) diff --git a/src/dynamixel/operating_mode.hpp b/src/dynamixel/operating_mode.hpp index e7b032eb..13a47e59 100644 --- a/src/dynamixel/operating_mode.hpp +++ b/src/dynamixel/operating_mode.hpp @@ -85,6 +85,10 @@ namespace dynamixel { return OperatingMode::joint; else if (4 == mode) return OperatingMode::multi_turn; + else if (5 == mode) + return OperatingMode::torque; + else if (16 == mode) + return OperatingMode::voltage; else return OperatingMode::unknown; } @@ -120,6 +124,9 @@ namespace dynamixel { case OperatingMode::multi_turn: return "multi_turn"; break; + case OperatingMode::voltage: + return "PWM (voltage)"; + break; case OperatingMode::unknown: default: return "unknown"; diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 3ee3f1c5..174e0adb 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -9,11 +9,13 @@ #include "../instruction_packet.hpp" #include "../instructions/action.hpp" #include "../instructions/bulk_read.hpp" +#include "../instructions/bulk_write.hpp" #include "../instructions/factory_reset.hpp" #include "../instructions/ping.hpp" #include "../instructions/read.hpp" #include 
"../instructions/reboot.hpp" #include "../instructions/reg_write.hpp" +#include "../instructions/sync_read.hpp" #include "../instructions/sync_write.hpp" #include "../instructions/write.hpp" #include "../status_packet.hpp" @@ -99,8 +101,10 @@ namespace dynamixel { typedef instructions::RegWrite reg_write_t; typedef instructions::Action action_t; typedef instructions::FactoryReset factory_reset_t; + typedef instructions::SyncRead sync_read_t; typedef instructions::SyncWrite sync_write_t; typedef instructions::BulkRead bulk_read_t; + typedef instructions::BulkWrite bulk_write_t; long long int id() const override { From 703aea111edea8e755ab93bbbc44596eda68c231 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Mon, 1 Apr 2019 11:12:29 +0200 Subject: [PATCH 10/22] minor update - some control mode added and baudrate table updated --- src/demos/operating_mode.cpp | 7 +++++-- src/dynamixel/instructions/sync_read.hpp | 8 ++++---- src/dynamixel/operating_mode.hpp | 2 +- src/dynamixel/servos/base_servo.hpp | 1 + src/dynamixel/servos/servo.hpp | 14 ++++++-------- 5 files changed, 17 insertions(+), 15 deletions(-) diff --git a/src/demos/operating_mode.cpp b/src/demos/operating_mode.cpp index 7bc79ef5..dd0bd2e9 100644 --- a/src/demos/operating_mode.cpp +++ b/src/demos/operating_mode.cpp @@ -1,9 +1,9 @@ #include +#include #include #include -#include using namespace dynamixel; using namespace servos; @@ -26,6 +26,9 @@ std::string string_operating_mode(Usb2Dynamixel& controller, typename Protocol:: case OperatingMode::multi_turn: str_mode = "multi-turn"; break; + case OperatingMode::voltage: + str_mode = "voltage"; + break; default: str_mode = "unknown"; } @@ -87,4 +90,4 @@ int main(int argc, char** argv) } return 0; -} \ No newline at end of file +} diff --git a/src/dynamixel/instructions/sync_read.hpp b/src/dynamixel/instructions/sync_read.hpp index 84475d90..b1ed889f 100644 --- a/src/dynamixel/instructions/sync_read.hpp +++ b/src/dynamixel/instructions/sync_read.hpp @@ 
-24,10 +24,10 @@ namespace dynamixel { std::vector parameters(ids.size() + 9); - parameters[0] = (uint8_t)(address[m] & 0xFF); - parameters[1] = (uint8_t)(address[m] >> 8) & 0xFF; - parameters[2] = (uint8_t)(data_length[m] & 0xFF); - parameters[3] = (uint8_t)(data_length[m] >> 8) & 0xFF; + parameters[0] = (uint8_t)(address & 0xFF); + parameters[1] = (uint8_t)(address >> 8) & 0xFF; + parameters[2] = (uint8_t)(data_length & 0xFF); + parameters[3] = (uint8_t)(data_length >> 8) & 0xFF; size_t curr = 4; diff --git a/src/dynamixel/operating_mode.hpp b/src/dynamixel/operating_mode.hpp index 13a47e59..0bfc28f6 100644 --- a/src/dynamixel/operating_mode.hpp +++ b/src/dynamixel/operating_mode.hpp @@ -125,7 +125,7 @@ namespace dynamixel { return "multi_turn"; break; case OperatingMode::voltage: - return "PWM (voltage)"; + return "voltage"; break; case OperatingMode::unknown: default: diff --git a/src/dynamixel/servos/base_servo.hpp b/src/dynamixel/servos/base_servo.hpp index c53e60df..95cb357f 100644 --- a/src/dynamixel/servos/base_servo.hpp +++ b/src/dynamixel/servos/base_servo.hpp @@ -31,6 +31,7 @@ namespace dynamixel { wheel, joint, multi_turn, + voltage, unknown }; namespace servos { diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 174e0adb..02990f60 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -388,13 +388,12 @@ namespace dynamixel { return bulk_read_t(_get_typed(ids), address, data_length); } - // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same + // Bulk operations. Only works for XM models with protocol 2. 
Only works if the models are known and they are all the same template static InstructionPacket get_current_speed_XM(const std::vector& ids) { - //std::vector address; - std::vector address; //uint8_t - std::vector data_length; //uint8_t + std::vector address; + std::vector data_length; for (size_t i = 0; i < ids.size(); i++) { address.push_back(0x80); // 0x80 for XM data_length.push_back(0x04); @@ -402,13 +401,12 @@ namespace dynamixel { return bulk_read_t(_get_typed(ids), address, data_length); } - // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same + // Bulk operations. Only works for XL models with protocol 2. Only works if the models are known and they are all the same template static InstructionPacket get_current_speed_XL(const std::vector& ids) { - //std::vector address; - std::vector address; //uint8_t - std::vector data_length; //uint8_t + std::vector address; + std::vector data_length; for (size_t i = 0; i < ids.size(); i++) { address.push_back(0x27); // 0x27 for XL data_length.push_back(0x02); From 52e16f79c89e29abf64a201026f5c97b3812125b Mon Sep 17 00:00:00 2001 From: Dorian Goepp Date: Tue, 2 Apr 2019 21:18:59 +0200 Subject: [PATCH 11/22] restore and rename the new version of set_goal_positions --- src/dynamixel/servos/servo.hpp | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 02990f60..36d4a9d2 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -229,17 +229,31 @@ namespace dynamixel { // Sync operations. Only works if the models are known and they are all the same // use case : std::make_shared(0)->set_goal_positions(ids, angles)); replace MODEL_SERVO by Mx28 or Xl320... 
+ template + static InstructionPacket set_goal_positions_angle(const std::vector& ids, const std::vector& positions) + { + if (ids.size() != positions.size()) + throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); + // Convert from radians to ticks (after going through degrees) + std::vector tick_positions; + for (auto pos : positions) + tick_positions.push_back((((pos * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); + // Pack the angles into bytes (for sending) + std::vector> packed(tick_positions.size()); + for (auto tick_pos : tick_positions) + packed.push_back(protocol_t::pack_data((typename ct_t::goal_position_t)tick_pos)); + + return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); + } + template static InstructionPacket set_goal_positions(const std::vector& ids, const std::vector& pos) { - std::vector final_pos; - for (size_t j = 0; j < pos.size(); j++) - final_pos.push_back((((pos[j] * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); - if (ids.size() != final_pos.size()) + if (ids.size() != pos.size()) throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); - std::vector> packed(final_pos.size()); - for (size_t i = 0; i < final_pos.size(); i++) - packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)final_pos[i]); + std::vector> packed(pos.size()); + for (size_t i = 0; i < pos.size(); i++) + packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)pos[i]); return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); } From ca8b2a5fc0c0eb3b08a0686b49df7921c1d73391 Mon Sep 17 00:00:00 2001 From: Dorian Goepp Date: Sat, 6 Apr 2019 14:44:07 
+0200 Subject: [PATCH 12/22] use angle version of set_goal_positions --- src/tools/utility.hpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/utility.hpp b/src/tools/utility.hpp index fd485320..c4d3685e 100644 --- a/src/tools/utility.hpp +++ b/src/tools/utility.hpp @@ -434,7 +434,7 @@ namespace dynamixel { "vectors of IDs and angles should have " "the same length"); _serial_interface.send( - std::make_shared(0)->set_goal_positions(ids, angles)); //Mx28 + std::make_shared(0)->set_goal_positions_angle(ids, angles)); //Mx28 StatusPacket status; for (int i = 0; i < ids.size(); i++) { From 380319afa4bb3b03db41c06d60141d8805989bf3 Mon Sep 17 00:00:00 2001 From: Dorian Goepp Date: Sat, 6 Apr 2019 16:27:41 +0200 Subject: [PATCH 13/22] Add two error types for vector sizes --- src/dynamixel/errors/bad_packet.hpp | 6 +-- src/dynamixel/errors/crc_error.hpp | 11 +++-- src/dynamixel/errors/error.hpp | 4 +- src/dynamixel/errors/servo_limit_error.hpp | 8 +-- src/dynamixel/errors/status_error.hpp | 6 +-- src/dynamixel/errors/unpack_error.hpp | 4 +- src/dynamixel/errors/vector_size_errors.hpp | 55 +++++++++++++++++++++ src/dynamixel/instructions/bulk_read.hpp | 12 ++--- src/dynamixel/instructions/bulk_write.hpp | 4 +- src/dynamixel/instructions/sync_read.hpp | 4 +- src/dynamixel/instructions/sync_write.hpp | 10 ++-- src/dynamixel/servos/servo.hpp | 23 ++++----- 12 files changed, 102 insertions(+), 45 deletions(-) create mode 100644 src/dynamixel/errors/vector_size_errors.hpp diff --git a/src/dynamixel/errors/bad_packet.hpp b/src/dynamixel/errors/bad_packet.hpp index c45995e2..c17b03b7 100644 --- a/src/dynamixel/errors/bad_packet.hpp +++ b/src/dynamixel/errors/bad_packet.hpp @@ -1,8 +1,8 @@ #ifndef DYNAMIXEL_BAD_PACKET_ERROR_HPP_ #define DYNAMIXEL_BAD_PACKET_ERROR_HPP_ -#include #include +#include #include "error.hpp" @@ -39,7 +39,7 @@ namespace dynamixel { private: const std::vector _packet; }; - } -} + } // namespace errors +} // namespace dynamixel 
#endif diff --git a/src/dynamixel/errors/crc_error.hpp b/src/dynamixel/errors/crc_error.hpp index 6be126ee..2cc3cb42 100644 --- a/src/dynamixel/errors/crc_error.hpp +++ b/src/dynamixel/errors/crc_error.hpp @@ -1,9 +1,9 @@ #ifndef DYNAMIXEL_ERRORS_CRC_ERROR_HPP_ #define DYNAMIXEL_ERRORS_CRC_ERROR_HPP_ -#include -#include #include +#include +#include #include "error.hpp" @@ -11,7 +11,8 @@ namespace dynamixel { namespace errors { class CrcError : public Error { public: - CrcError(uint8_t id, uint8_t protocol, uint32_t expected, uint32_t received) : _id(id), _protocol(protocol), _expected(expected), _received(received) + CrcError(uint8_t id, uint8_t protocol, uint32_t expected, uint32_t received) + : _id(id), _protocol(protocol), _expected(expected), _received(received) { std::stringstream err_message; err_message << "Status: checksum error while decoding packet with ID " << (int)id; @@ -44,7 +45,7 @@ namespace dynamixel { uint8_t _id, _protocol; uint32_t _expected, _received; }; - } -} + } // namespace errors +} // namespace dynamixel #endif diff --git a/src/dynamixel/errors/error.hpp b/src/dynamixel/errors/error.hpp index 70af8bd1..044ebdbe 100644 --- a/src/dynamixel/errors/error.hpp +++ b/src/dynamixel/errors/error.hpp @@ -47,8 +47,8 @@ namespace dynamixel { { return err.print(os); } - } -} + } // namespace errors +} // namespace dynamixel #define CHECK(val, msg) check(__FILE__, __LINE__, (val), msg) #endif diff --git a/src/dynamixel/errors/servo_limit_error.hpp b/src/dynamixel/errors/servo_limit_error.hpp index 5ae063c1..633e698d 100644 --- a/src/dynamixel/errors/servo_limit_error.hpp +++ b/src/dynamixel/errors/servo_limit_error.hpp @@ -1,9 +1,9 @@ #ifndef DYNAMIXEL_ERRORS_SERVO_LIMIT_ERROR_HPP_ #define DYNAMIXEL_ERRORS_SERVO_LIMIT_ERROR_HPP_ -#include -#include #include +#include +#include #include "error.hpp" @@ -46,7 +46,7 @@ namespace dynamixel { int _id; double _max, _min, _value; }; - } -} + } // namespace errors +} // namespace dynamixel #endif diff --git 
a/src/dynamixel/errors/status_error.hpp b/src/dynamixel/errors/status_error.hpp index 8fa074d6..83c605cd 100644 --- a/src/dynamixel/errors/status_error.hpp +++ b/src/dynamixel/errors/status_error.hpp @@ -1,8 +1,8 @@ #ifndef DYNAMIXEL_STATUS_ERROR_HPP_ #define DYNAMIXEL_STATUS_ERROR_HPP_ -#include #include +#include #include "error.hpp" @@ -38,7 +38,7 @@ namespace dynamixel { private: uint8_t _id, _protocol, _error_byte; }; - } -} + } // namespace errors +} // namespace dynamixel #endif diff --git a/src/dynamixel/errors/unpack_error.hpp b/src/dynamixel/errors/unpack_error.hpp index 0c0d0df5..f4411765 100644 --- a/src/dynamixel/errors/unpack_error.hpp +++ b/src/dynamixel/errors/unpack_error.hpp @@ -1,9 +1,9 @@ #ifndef DYNAMIXEL_ERRORS_UNPACK_ERROR_HPP_ #define DYNAMIXEL_ERRORS_UNPACK_ERROR_HPP_ -#include -#include #include +#include +#include #include "error.hpp" diff --git a/src/dynamixel/errors/vector_size_errors.hpp b/src/dynamixel/errors/vector_size_errors.hpp new file mode 100644 index 00000000..cd655bc4 --- /dev/null +++ b/src/dynamixel/errors/vector_size_errors.hpp @@ -0,0 +1,55 @@ +#ifndef DYNAMIXEL_VECTOR_SIZE_ERROR_HPP_ +#define DYNAMIXEL_VECTOR_SIZE_ERROR_HPP_ + +#include + +#include "error.hpp" + +namespace dynamixel { + namespace errors { + class VectorEmptyError : public Error { + public: + VectorEmptyError(const std::string& method_name, + const std::string& vector_name) + : _method_name(method_name), _vector_name(vector_name) + { + this->_msg = "The vector " + vector_name + ", passed to " + + method_name + " cannot be empty (size 0)"; + } + + std::string vector_name() const + { + return _vector_name; + } + + std::string method_name() const + { + return _method_name; + } + + protected: + const std::string _method_name, _vector_name; + }; + + class VectorSizesDifferError : public Error { + public: + VectorSizesDifferError(const std::string& method_name, + const std::string& name1, const std::string& name2) + : _method_name(method_name) + { + this->_msg 
= "The vectors " + name1 + " and " + name2 + + ", passed to " + method_name + " should have the same size."; + } + + std::string method_name() const + { + return _method_name; + } + + protected: + const std::string _method_name; + }; + } // namespace errors +} // namespace dynamixel + +#endif diff --git a/src/dynamixel/instructions/bulk_read.hpp b/src/dynamixel/instructions/bulk_read.hpp index 5d150a3f..edd52dce 100644 --- a/src/dynamixel/instructions/bulk_read.hpp +++ b/src/dynamixel/instructions/bulk_read.hpp @@ -4,7 +4,7 @@ #include #include -#include "../errors/error.hpp" +#include "../errors/vector_size_errors.hpp" #include "../instruction_packet.hpp" namespace dynamixel { @@ -54,7 +54,7 @@ namespace dynamixel { const uint8_t& data_length) { if (ids.size() == 0) - throw errors::Error("BulkRead: ids vector of size zero"); + throw errors::VectorEmptyError("BulkRead", "ids"); std::vector parameters(3 * ids.size() + 3); @@ -95,7 +95,7 @@ namespace dynamixel { const uint16_t& data_length) { if (ids.size() == 0) - throw errors::Error("BulkRead: ids vector of size zero"); + throw errors::VectorEmptyError("BulkRead", "ids"); std::vector parameters(5 * ids.size() + 5); size_t curr = 0; @@ -116,11 +116,11 @@ namespace dynamixel { std::vector lengths) { if (ids.size() == 0) - throw errors::Error("BulkRead: ids vector of size zero"); + throw errors::VectorEmptyError("BulkRead", "ids"); if (ids.size() != addresses.size()) - throw errors::Error("BulkRead: mismatching size for ids and addresses"); + throw errors::VectorSizesDifferError("BulkRead", "ids", "addresses"); if (ids.size() != lengths.size()) - throw errors::Error("BulkRead: mismatching size for ids and lengths/sizes"); + throw errors::VectorSizesDifferError("BulkRead", "ids", "lengths"); } }; } // namespace instructions diff --git a/src/dynamixel/instructions/bulk_write.hpp b/src/dynamixel/instructions/bulk_write.hpp index ad706dce..052d7f50 100644 --- a/src/dynamixel/instructions/bulk_write.hpp +++ 
b/src/dynamixel/instructions/bulk_write.hpp @@ -4,7 +4,7 @@ #include #include -#include "../errors/error.hpp" +#include "../errors/vector_size_errors.hpp" #include "../instruction_packet.hpp" namespace dynamixel { @@ -22,7 +22,7 @@ namespace dynamixel { const uint16_t& data_length, const std::vector>& data) { if (ids.size() == 0) - throw errors::Error("BulkWrite: ids vector of size zero"); + throw errors::VectorEmptyError("BulkWrite", "ids"); std::vector parameters((5 + data_length) * ids.size() + 5); size_t curr = 0; diff --git a/src/dynamixel/instructions/sync_read.hpp b/src/dynamixel/instructions/sync_read.hpp index b1ed889f..e3e450a6 100644 --- a/src/dynamixel/instructions/sync_read.hpp +++ b/src/dynamixel/instructions/sync_read.hpp @@ -4,7 +4,7 @@ #include #include -#include "../errors/error.hpp" +#include "../errors/vector_size_errors.hpp" #include "../instruction_packet.hpp" namespace dynamixel { @@ -20,7 +20,7 @@ namespace dynamixel { uint16_t data_length) { if (ids.size() == 0) - throw errors::Error("SyncRead: ids vector of size zero"); + throw errors::VectorEmptyError("SyncRead", "ids"); std::vector parameters(ids.size() + 9); diff --git a/src/dynamixel/instructions/sync_write.hpp b/src/dynamixel/instructions/sync_write.hpp index 187e293c..2ef73e6e 100644 --- a/src/dynamixel/instructions/sync_write.hpp +++ b/src/dynamixel/instructions/sync_write.hpp @@ -4,7 +4,7 @@ #include #include -#include "../errors/error.hpp" +#include "../errors/vector_size_errors.hpp" #include "../instruction_packet.hpp" namespace dynamixel { @@ -21,9 +21,9 @@ namespace dynamixel { const std::vector>& data) { if (ids.size() == 0) - throw errors::Error("SyncWrite: ids vector of size zero"); + throw errors::VectorEmptyError("SyncWrite", "ids"); if (ids.size() != data.size()) - throw errors::Error("SyncWrite: mismatching vectors size for ids and data"); + throw errors::VectorSizesDifferError("SyncWrite", "ids", "data"); typename T::length_t data_length = data[0].size(); std::vector 
parameters((data_length + 1) * ids.size() + 2); @@ -50,9 +50,9 @@ namespace dynamixel { const std::vector>& data) { if (ids.size() == 0) - throw errors::Error("SyncWrite: ids vector of size zero"); + throw errors::VectorEmptyError("SyncWrite", "ids"); if (ids.size() != data.size()) - throw errors::Error("SyncWrite: mismatching vectors size for ids and data"); + throw errors::VectorSizesDifferError("SyncWrite", "ids", "data"); typename T::length_t data_length = data[0].size(); std::vector parameters((data_length + 1) * ids.size() + 4); diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 36d4a9d2..dd4ecf90 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -6,6 +6,7 @@ #include "../errors/error.hpp" #include "../errors/servo_limit_error.hpp" +#include "../errors/vector_size_errors.hpp" #include "../instruction_packet.hpp" #include "../instructions/action.hpp" #include "../instructions/bulk_read.hpp" @@ -230,17 +231,17 @@ namespace dynamixel { // Sync operations. Only works if the models are known and they are all the same // use case : std::make_shared(0)->set_goal_positions(ids, angles)); replace MODEL_SERVO by Mx28 or Xl320... 
template - static InstructionPacket set_goal_positions_angle(const std::vector& ids, const std::vector& positions) + static InstructionPacket set_goal_positions_angle(const std::vector& ids, const std::vector& angles) { - if (ids.size() != positions.size()) - throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); + if (ids.size() != angles.size()) + throw errors::VectorSizesDifferError("set_goal_positions_angle", "ids", "angles"); // Convert from radians to ticks (after going through degrees) - std::vector tick_positions; - for (auto pos : positions) - tick_positions.push_back((((pos * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); + std::vector positions; + for (auto pos : angles) + positions.push_back((((pos * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); // Pack the angles into bytes (for sending) - std::vector> packed(tick_positions.size()); - for (auto tick_pos : tick_positions) + std::vector> packed(positions.size()); + for (auto tick_pos : positions) packed.push_back(protocol_t::pack_data((typename ct_t::goal_position_t)tick_pos)); return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); @@ -250,7 +251,7 @@ namespace dynamixel { static InstructionPacket set_goal_positions(const std::vector& ids, const std::vector& pos) { if (ids.size() != pos.size()) - throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); + throw errors::VectorSizesDifferError("set_goal_positions", "ids", "pos"); std::vector> packed(pos.size()); for (size_t i = 0; i < pos.size(); i++) packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)pos[i]); @@ -352,7 +353,7 @@ namespace 
dynamixel { static InstructionPacket set_moving_speeds(const std::vector& ids, const std::vector& speeds, OperatingMode operating_mode) { if (ids.size() != speeds.size()) - throw errors::Error("Instruction: error when setting moving speeds: \n\tMismatch in vector size for ids and speeds"); + throw errors::VectorSizesDifferError("set_moving_speeds", "ids", "speeds"); std::vector speed_ticks; // convert radians per second to ticks for (int i = 0; i < speeds.size(); i++) @@ -434,7 +435,7 @@ namespace dynamixel { static InstructionPacket set_torque_limits(const std::vector& ids, const std::vector& torque_limits) { if (ids.size() != torque_limits.size()) - throw errors::Error("Instruction: error when setting torque limits: \n\tMismatch in vector size for ids and torques"); + throw errors::VectorSizesDifferError("set_torque_limits", "ids", "torque_limits"); std::vector> packed(torque_limits.size()); for (size_t i = 0; i < torque_limits.size(); i++) packed[i] = protocol_t::pack_data((typename ct_t::torque_limit_t)torque_limits[i]); From 5a7d828e945dac116e676a930f18ab0e7900ecf4 Mon Sep 17 00:00:00 2001 From: PedroDesRobots Date: Fri, 14 Jun 2019 17:49:26 +0200 Subject: [PATCH 14/22] 'echo value' changed (4ms -> 0ms) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6c5e6c3b..15d892c8 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ If you want your USB2AX serial interface to appear in `/dev` as `usb2axN` (where If the reading time seems too long, check the value of the USB latency timer. On ubuntu this value is retrieved with this command `cat /sys/bus/usb-serial/devices/ttyUSB0/latency_timer`. It is the time, in milliseconds, for which the device driver buffers data before making it available. -You can change this timer with the command `echo 4 | sudo tee /sys/bus/usb-serial/devices/ttyUSB0/latency_timer` which sets it to 4 ms for the device `/dev/ttyUSB0`. 
+You can change this timer with the command `echo 0 | sudo tee /sys/bus/usb-serial/devices/ttyUSB0/latency_timer` which sets it to 0 ms for the device `/dev/ttyUSB0`. ## Using Libdynamixel on Mac From 8c147a041f5c4c233b008e04baee6b03386a36ca Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Fri, 14 Jun 2019 17:51:30 +0200 Subject: [PATCH 15/22] add bulk_read sync_write function with model abstraction - IN PROGRESS --- src/demos/wscript | 1 - src/dynamixel/servos/base_servo.hpp | 10 ++++ src/dynamixel/servos/servo.hpp | 78 +++++++++++++++-------------- src/tools/utility.hpp | 25 ++++++--- 4 files changed, 68 insertions(+), 46 deletions(-) diff --git a/src/demos/wscript b/src/demos/wscript index 683d4a2e..cf785b27 100644 --- a/src/demos/wscript +++ b/src/demos/wscript @@ -4,6 +4,5 @@ def build(bld): bld(features='cxx cxxprogram', source='dynamixel_simple_check.cpp', target="dynamixel_simple_check", includes=". ..") - bld(features='cxx cxxprogram', source='dynamixel_test.cpp', target="dynamixel_test", includes=". ..") bld(features='cxx cxxprogram', source='omnipointer_arm.cpp', target="omnipointer_arm", includes=". ..") bld(features='cxx cxxprogram', source='operating_mode.cpp', target="operating_mode", includes=". 
..") diff --git a/src/dynamixel/servos/base_servo.hpp b/src/dynamixel/servos/base_servo.hpp index 95cb357f..186b739c 100644 --- a/src/dynamixel/servos/base_servo.hpp +++ b/src/dynamixel/servos/base_servo.hpp @@ -194,6 +194,16 @@ namespace dynamixel { throw errors::Error("parse_present_position_angle not implemented in model"); } + virtual InstructionPacket sync_goal_position_angle(const std::vector& ids, const std::vector& pos) const + { + throw errors::Error("sync_goal_position_angle not implemented in model"); + } + + virtual InstructionPacket bulk_read_position_angle(const std::vector& ids) const + { + throw errors::Error("bulk_read_position_angle not implemented in model"); + } + // virtual InstructionPacket get_current_positions_all(std::vector ids) const // { // throw errors::Error("get_current_positions_all not implemented in model"); diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 02990f60..af42246e 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -1,9 +1,6 @@ #ifndef DYNAMIXEL_SERVOS_SERVO_HPP_ #define DYNAMIXEL_SERVOS_SERVO_HPP_ -#include -#include - #include "../errors/error.hpp" #include "../errors/servo_limit_error.hpp" #include "../instruction_packet.hpp" @@ -22,6 +19,9 @@ #include "base_servo.hpp" #include "model_traits.hpp" #include "protocol_specific_packets.hpp" +#include +#include +#include #define MODEL_NAME(Name) \ std::string model_name() const override \ @@ -101,6 +101,7 @@ namespace dynamixel { typedef instructions::RegWrite reg_write_t; typedef instructions::Action action_t; typedef instructions::FactoryReset factory_reset_t; + typedef instructions::Reboot reboot_t; typedef instructions::SyncRead sync_read_t; typedef instructions::SyncWrite sync_write_t; typedef instructions::BulkRead bulk_read_t; @@ -175,6 +176,33 @@ namespace dynamixel { return reg_goal_position(id, pos); } + // template + static inline InstructionPacket sync_goal_position_angle(typename 
Servo::protocol_t::id_t id, const std::vector& ids, const std::vector& pos) + { + std::vector final_pos; + for (size_t j = 0; j < pos.size(); j++) + final_pos.push_back((((pos[j] * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); + if (ids.size() != final_pos.size()) + throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); + std::vector> packed(final_pos.size()); + for (size_t i = 0; i < final_pos.size(); i++) + packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)final_pos[i]); + + return sync_write_t(ct_t::goal_position, ids, packed); + } + + static inline InstructionPacket bulk_read_position_angle(typename Servo::protocol_t::id_t id, const std::vector& ids) + { + std::vector::protocol_t::address_t> address; + std::vector::protocol_t::length_t> data_length; + typename Servo::protocol_t::address_t present_position_address = ct_t::present_position; + typename ct_t::present_position_t type_address; + for (size_t i = 0; i < ids.size(); i++) { + address.push_back(present_position_address); + data_length.push_back(sizeof(type_address)); + } + return bulk_read_t(ids, address, data_length); + } InstructionPacket set_goal_position_angle(double rad) const override { @@ -186,6 +214,15 @@ namespace dynamixel { return Model::reg_goal_position_angle(this->_id, rad); } + InstructionPacket sync_goal_position_angle(const std::vector& ids, const std::vector& pos) const override + { + return Model::sync_goal_position_angle(this->_id, ids, pos); + } + + InstructionPacket bulk_read_position_angle(const std::vector& ids) const override + { + return Model::bulk_read_position_angle(this->_id, ids); + } static InstructionPacket::protocol_t> get_present_position_angle(typename Servo::protocol_t::id_t id) { return get_present_position(id); @@ -210,40 +247,6 @@ namespace dynamixel { 
return Model::parse_present_position_angle(this->_id, st); } - // static InstructionPacket::protocol_t> get_current_positions_all(typename Servo::protocol_t::id_t id, const std::vector ids) - // { - // typename Servo::ct_t::present_position_t pos; - // std::vector address; - // std::vector data_length; - // for (size_t i = 0; i < ids.size(); i++) { - // address.push_back(pos); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - // data_length.push_back(sizeof(pos)); - // } - // return bulk_read_t(_get_typed(ids), address, data_length); - // } - // - // InstructionPacket::protocol_t> get_current_positions_all(const std::vector ids) const override - // { - // return Model::get_current_positions_all(this->_id, ids); - // } - - // Sync operations. Only works if the models are known and they are all the same - // use case : std::make_shared(0)->set_goal_positions(ids, angles)); replace MODEL_SERVO by Mx28 or Xl320... - template - static InstructionPacket set_goal_positions(const std::vector& ids, const std::vector& pos) - { - std::vector final_pos; - for (size_t j = 0; j < pos.size(); j++) - final_pos.push_back((((pos[j] * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); - if (ids.size() != final_pos.size()) - throw errors::Error("Instruction: error when setting goal positions: \n\tMismatch in vector size for ids and positions"); - std::vector> packed(final_pos.size()); - for (size_t i = 0; i < final_pos.size(); i++) - packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)final_pos[i]); - - return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); - } - // Bulk operations. Only works for MX models with protocol 1. 
Only works if the models are known and they are all the same template static InstructionPacket get_current_positions_MX(const std::vector& ids) @@ -444,6 +447,7 @@ namespace dynamixel { } typename protocol_t::id_t _id; + }; // namespace servos } // namespace servos } // namespace dynamixel diff --git a/src/tools/utility.hpp b/src/tools/utility.hpp index fd485320..48342f3e 100644 --- a/src/tools/utility.hpp +++ b/src/tools/utility.hpp @@ -433,9 +433,13 @@ namespace dynamixel { throw errors::UtilityError("set_position(vector, vector): the " "vectors of IDs and angles should have " "the same length"); - _serial_interface.send( - std::make_shared(0)->set_goal_positions(ids, angles)); //Mx28 - + std::vector ids_cast; + for (int i = 0; i < ids.size(); i++) { + ids_cast.push_back((uint8_t)ids[i]); + } + // _serial_interface.send( + // std::make_shared(0)->set_goal_positions(ids, angles)); //Mx28 + _serial_interface.send(_servos.at(ids[0])->sync_goal_position_angle(ids_cast, angles)); StatusPacket status; for (int i = 0; i < ids.size(); i++) { _serial_interface.recv(status); @@ -529,12 +533,17 @@ namespace dynamixel { ids.push_back(servo.first); } - for (auto servo : _servos) { - _serial_interface.send( - std::make_shared(0)->get_current_positions_XL(ids)); - // servo.second->get_current_positions_all(ids)); - break; + std::vector ids_cast; + for (int i = 0; i < ids.size(); i++) { + ids_cast.push_back((uint8_t)ids[i]); } + // _servos.at(ids[0])-> + // _serial_interface.send(_servos.at(ids[0])->sync_goal_position_angle(ids_cast, angles)); + // for (auto servo : _servos) { + _serial_interface.send(_servos.at(ids[0])->bulk_read_position_angle(ids_cast)); + // std::make_shared(0)->get_current_positions_XL(ids)); + // break; + // } StatusPacket status; for (auto servo : _servos) { From fe7cea4d309166c3c13ccf1108524f17cedce4b1 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Fri, 14 Jun 2019 21:31:13 +0200 Subject: [PATCH 16/22] add bulk_speed sync_speed with model 
abstraction --- src/dynamixel/controllers/usb2dynamixel.hpp | 2 +- src/dynamixel/servos/base_servo.hpp | 15 +- src/dynamixel/servos/servo.hpp | 154 +++++++++----------- 3 files changed, 79 insertions(+), 92 deletions(-) diff --git a/src/dynamixel/controllers/usb2dynamixel.hpp b/src/dynamixel/controllers/usb2dynamixel.hpp index ed3b6e54..a86bdbdf 100644 --- a/src/dynamixel/controllers/usb2dynamixel.hpp +++ b/src/dynamixel/controllers/usb2dynamixel.hpp @@ -24,7 +24,7 @@ namespace dynamixel { @param error number, usually the value of errno @return std::string of the explanation **/ - std::string write_error_string(int error_number) + inline std::string write_error_string(int error_number) { switch (error_number) { case EAGAIN: diff --git a/src/dynamixel/servos/base_servo.hpp b/src/dynamixel/servos/base_servo.hpp index 186b739c..e273825f 100644 --- a/src/dynamixel/servos/base_servo.hpp +++ b/src/dynamixel/servos/base_servo.hpp @@ -204,11 +204,6 @@ namespace dynamixel { throw errors::Error("bulk_read_position_angle not implemented in model"); } - // virtual InstructionPacket get_current_positions_all(std::vector ids) const - // { - // throw errors::Error("get_current_positions_all not implemented in model"); - // } - // ================================================================= // Speed-specific @@ -222,6 +217,16 @@ namespace dynamixel { throw errors::Error("reg_moving_speed_angle not implemented in model"); } + virtual InstructionPacket sync_moving_speed_angle(const std::vector& ids, const std::vector& rad_per_s, OperatingMode operating_mode = OperatingMode::joint) const + { + throw errors::Error("sync_moving_speed_angle not implemented in model"); + } + + virtual InstructionPacket bulk_read_speed_angle(const std::vector& ids) const + { + throw errors::Error("bulk_read_speed_angle not implemented in model"); + } + virtual double parse_joint_speed(const StatusPacket& st) const { throw errors::Error("parse_joint_speed not implemented in model"); diff --git 
a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index af42246e..9632912e 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -149,7 +149,6 @@ namespace dynamixel { // ================================================================= // Position-specific - static inline InstructionPacket set_goal_position_angle(typename Servo::protocol_t::id_t id, double rad) { double deg = rad * 57.2958; @@ -163,6 +162,10 @@ namespace dynamixel { return set_goal_position(id, pos); } + InstructionPacket set_goal_position_angle(double rad) const override + { + return Model::set_goal_position_angle(this->_id, rad); + } static inline InstructionPacket reg_goal_position_angle(typename Servo::protocol_t::id_t id, double rad) { double deg = rad * 57.2958; @@ -176,6 +179,11 @@ namespace dynamixel { return reg_goal_position(id, pos); } + + InstructionPacket reg_goal_position_angle(double rad) const override + { + return Model::reg_goal_position_angle(this->_id, rad); + } // template static inline InstructionPacket sync_goal_position_angle(typename Servo::protocol_t::id_t id, const std::vector& ids, const std::vector& pos) { @@ -191,6 +199,11 @@ namespace dynamixel { return sync_write_t(ct_t::goal_position, ids, packed); } + InstructionPacket sync_goal_position_angle(const std::vector& ids, const std::vector& pos) const override + { + return Model::sync_goal_position_angle(this->_id, ids, pos); + } + static inline InstructionPacket bulk_read_position_angle(typename Servo::protocol_t::id_t id, const std::vector& ids) { std::vector::protocol_t::address_t> address; @@ -204,21 +217,6 @@ namespace dynamixel { return bulk_read_t(ids, address, data_length); } - InstructionPacket set_goal_position_angle(double rad) const override - { - return Model::set_goal_position_angle(this->_id, rad); - } - - InstructionPacket reg_goal_position_angle(double rad) const override - { - return Model::reg_goal_position_angle(this->_id, rad); - } - - InstructionPacket 
sync_goal_position_angle(const std::vector& ids, const std::vector& pos) const override - { - return Model::sync_goal_position_angle(this->_id, ids, pos); - } - InstructionPacket bulk_read_position_angle(const std::vector& ids) const override { return Model::bulk_read_position_angle(this->_id, ids); @@ -247,45 +245,6 @@ namespace dynamixel { return Model::parse_present_position_angle(this->_id, st); } - // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_MX(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x24); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x02); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - - // Bulk operations. Only works for XL models with protocol 2. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_XL(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x25); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x02); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - - // Bulk operations. Only works for XM models with protocol 2. 
Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_XM(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x84); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x04); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - // ================================================================= // Speed-specific @@ -310,6 +269,50 @@ namespace dynamixel { return Model::reg_moving_speed_angle(this->_id, rad_per_s, operating_mode); } + static inline InstructionPacket sync_moving_speed_angle(typename Servo::protocol_t::id_t id, const std::vector& ids, const std::vector& rad_per_s, OperatingMode operating_mode) + { + if (ids.size() != rad_per_s.size()) + throw errors::Error("Instruction: error when setting moving speeds: \n\tMismatch in vector size for ids and speeds"); + std::vector speed_ticks; + // convert radians per second to ticks + for (int i = 0; i < rad_per_s.size(); i++) + speed_ticks.push_back(round(60 * rad_per_s[i] / (two_pi * ct_t::rpm_per_tick))); + + for (int j = 0; j < speed_ticks.size(); j++) { + // The actuator is operated as a wheel (continuous rotation) + if (operating_mode == OperatingMode::wheel) { + // Check that desired speed is within the actuator's bounds + + if (!(abs(speed_ticks[j]) >= ct_t::min_goal_speed && abs(speed_ticks[j]) <= ct_t::max_goal_speed)) { + throw errors::Error("Desired speed is out actuator's bounds"); + } + + // Move negatives values in the range [ct_t::min_goal_speed, + // ct_t::2*max_goal_speed+1] + if (speed_ticks[j] < 0) { + speed_ticks[j] = -speed_ticks[j] + ct_t::max_goal_speed + 1; + } + } + // The actuator is operated as a joint (not continuous rotation) + else if (operating_mode == OperatingMode::joint) { + if (!(speed_ticks[j] >= ct_t::min_goal_speed && speed_ticks[j] <= ct_t::max_goal_speed)) { + throw 
errors::Error("Desired speed is out actuator's bounds"); + } + } + } + + std::vector> packed(speed_ticks.size()); + for (size_t i = 0; i < speed_ticks.size(); i++) + packed[i] = protocol_t::pack_data((typename ct_t::moving_speed_t)speed_ticks[i]); + + return sync_write_t(ct_t::moving_speed, ids, packed); + } + + InstructionPacket sync_moving_speed_angle(const std::vector& ids, const std::vector& rad_per_s, OperatingMode operating_mode = OperatingMode::joint) const override + { + return Model::sync_moving_speed_angle(this->_id, ids, rad_per_s, operating_mode); + } + // TODO: read speed from dynamixel pros to check that we do get negative values too // FIXME : replace the following by protocol specific methods ? static double parse_joint_speed(typename Servo::protocol_t::id_t id, const StatusPacket::protocol_t>& st) @@ -377,45 +380,24 @@ namespace dynamixel { return sync_write_t(ct_t::moving_speed, _get_typed(ids), packed); } - // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_speed_MX(const std::vector& ids) + static inline InstructionPacket bulk_read_speed_angle(typename Servo::protocol_t::id_t id, const std::vector& ids) { - //std::vector address; - std::vector address; //uint8_t - std::vector data_length; //uint8_t + std::vector::protocol_t::address_t> address; + std::vector::protocol_t::length_t> data_length; + typename Servo::protocol_t::address_t present_speed_address = ct_t::present_speed; + typename ct_t::present_speed_t type_address; for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x26); // 0x27 for XL and 0x26 for MX - data_length.push_back(0x02); + address.push_back(present_speed_address); + data_length.push_back(sizeof(type_address)); } - return bulk_read_t(_get_typed(ids), address, data_length); + return bulk_read_t(ids, address, data_length); } - // Bulk operations. Only works for XM models with protocol 2. 
Only works if the models are known and they are all the same - template - static InstructionPacket get_current_speed_XM(const std::vector& ids) + InstructionPacket bulk_read_speed_angle(const std::vector& ids) const override { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x80); // 0x80 for XM - data_length.push_back(0x04); - } - return bulk_read_t(_get_typed(ids), address, data_length); + return Model::bulk_read_speed_angle(this->_id, ids); } - // Bulk operations. Only works for XL models with protocol 2. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_speed_XL(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x27); // 0x27 for XL - data_length.push_back(0x02); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } // ================================================================= // Torque-specific From 9ddd03798de09f6a64be5b930805f61a3aac5652 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Mon, 17 Jun 2019 11:32:38 +0200 Subject: [PATCH 17/22] operating mode update - PWM/Voltage and multi_turn_torque --- src/demos/operating_mode.cpp | 7 +++++-- src/dynamixel/operating_mode.hpp | 11 +++++++---- 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/src/demos/operating_mode.cpp b/src/demos/operating_mode.cpp index dd0bd2e9..207d64e9 100644 --- a/src/demos/operating_mode.cpp +++ b/src/demos/operating_mode.cpp @@ -26,8 +26,11 @@ std::string string_operating_mode(Usb2Dynamixel& controller, typename Protocol:: case OperatingMode::multi_turn: str_mode = "multi-turn"; break; - case OperatingMode::voltage: - str_mode = "voltage"; + case OperatingMode::multi_turn_torque: + str_mode = "multi-turn-torque"; + break; + case OperatingMode::PWM: + str_mode = "PWM"; break; default: str_mode = "unknown"; diff --git 
a/src/dynamixel/operating_mode.hpp b/src/dynamixel/operating_mode.hpp index 0bfc28f6..c944db38 100644 --- a/src/dynamixel/operating_mode.hpp +++ b/src/dynamixel/operating_mode.hpp @@ -86,9 +86,9 @@ namespace dynamixel { else if (4 == mode) return OperatingMode::multi_turn; else if (5 == mode) - return OperatingMode::torque; + return OperatingMode::multi_turn_torque; else if (16 == mode) - return OperatingMode::voltage; + return OperatingMode::PWM; else return OperatingMode::unknown; } @@ -124,8 +124,11 @@ namespace dynamixel { case OperatingMode::multi_turn: return "multi_turn"; break; - case OperatingMode::voltage: - return "voltage"; + case OperatingMode::multi_turn_torque: + return "multi_turn_torque"; + break; + case OperatingMode::PWM: + return "PWM/Voltage"; break; case OperatingMode::unknown: default: From 53f6b7a3b706a2237cdc23f4c9e607d474ccbe3a Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Mon, 17 Jun 2019 11:33:52 +0200 Subject: [PATCH 18/22] command line for bulk and sync added --- src/dynamixel/servos/base_servo.hpp | 3 +- src/tools/command_line_utility.hpp | 2 +- src/tools/utility.hpp | 51 ++++++++++++++++++++++++++--- 3 files changed, 50 insertions(+), 6 deletions(-) diff --git a/src/dynamixel/servos/base_servo.hpp b/src/dynamixel/servos/base_servo.hpp index e273825f..4e3ff05b 100644 --- a/src/dynamixel/servos/base_servo.hpp +++ b/src/dynamixel/servos/base_servo.hpp @@ -31,7 +31,8 @@ namespace dynamixel { wheel, joint, multi_turn, - voltage, + multi_turn_torque, + PWM, unknown }; namespace servos { diff --git a/src/tools/command_line_utility.hpp b/src/tools/command_line_utility.hpp index 2aa9dac5..bbdb3fba 100644 --- a/src/tools/command_line_utility.hpp +++ b/src/tools/command_line_utility.hpp @@ -637,7 +637,7 @@ namespace dynamixel { _dyn_util.detect_servos(); std::pair, std::vector> speeds - = _dyn_util.get_speed(); + = _dyn_util.get_speed_bulk(); std::cout << "Goal angular velocities of the actuators (rad/s):" << std::endl; diff --git 
a/src/tools/utility.hpp b/src/tools/utility.hpp index 48342f3e..b3476132 100644 --- a/src/tools/utility.hpp +++ b/src/tools/utility.hpp @@ -685,10 +685,13 @@ namespace dynamixel { throw errors::UtilityError("set_speed_sync(vector, vector): the " "vectors of IDs and speeds should have " "the same length"); - - _serial_interface.send( - std::make_shared(0)->set_moving_speeds(ids, speeds, OperatingMode::wheel)); - + std::vector ids_cast; + for (int i = 0; i < ids.size(); i++) { + ids_cast.push_back((uint8_t)ids[i]); + } + // _serial_interface.send( + // std::make_shared(0)->set_moving_speeds(ids, speeds, OperatingMode::wheel)); + _serial_interface.send(_servos.at(ids[0])->sync_moving_speed_angle(ids_cast, speeds, OperatingMode::joint)); StatusPacket status; for (int i = 0; i < ids.size(); i++) { _serial_interface.recv(status); @@ -771,6 +774,46 @@ namespace dynamixel { return std::make_pair(ids, speeds); } + std::pair, std::vector> + get_speed_bulk() const + { + check_scanned(); + + std::vector speeds; + std::vector ids; + + for (auto servo : _servos) { + ids.push_back(servo.first); + } + + std::vector ids_cast; + for (int i = 0; i < ids.size(); i++) { + ids_cast.push_back((uint8_t)ids[i]); + } + + _serial_interface.send(_servos.at(ids[0])->bulk_read_speed_angle(ids_cast)); + + StatusPacket status; + for (auto servo : _servos) { + + _serial_interface.recv(status); + + // parse response to get the speed + if (status.valid()) { + + speeds.push_back( + servo.second->parse_present_position_angle(status)); + } + else { + std::stringstream message; + message << (int)servo.first << " did not answer to the request for " + << "its speed"; + throw errors::Error(message.str()); + } + } + return std::make_pair(ids, speeds); + } + /** Enable (or disable) an actuator. By default, it will enable the actuator, unless one gives the `enable` argument with value false. 
Here, enabling an actuator means that it
- // case EWOULDBLOCK: - // return "EWOULDBLOCK: The file descriptor fd refers to a socket " - // "and has been marked nonblocking (O_NONBLOCK), and the " - // "write would block."; + case EWOULDBLOCK: + return "EWOULDBLOCK: The file descriptor fd refers to a socket " + "and has been marked nonblocking (O_NONBLOCK), and the " + "write would block."; case EBADF: return "EBADF: fd is not a valid file descriptor or is not open for writing."; case EDESTADDRREQ: From 58ccb5d163e35b5d0ed024d1c94851480396c746 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Mon, 17 Jun 2019 11:44:30 +0200 Subject: [PATCH 21/22] comment case EWOULDBLOCK --- src/dynamixel/controllers/usb2dynamixel.hpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/dynamixel/controllers/usb2dynamixel.hpp b/src/dynamixel/controllers/usb2dynamixel.hpp index e90b8902..a86bdbdf 100644 --- a/src/dynamixel/controllers/usb2dynamixel.hpp +++ b/src/dynamixel/controllers/usb2dynamixel.hpp @@ -32,10 +32,10 @@ namespace dynamixel { "a socket and has been marked nonblocking (O_NONBLOCK), and " "the write would block."; // On some OS, EWOULDBLOCK has the same value as EAGAIN and would hence not compile. 
- case EWOULDBLOCK: - return "EWOULDBLOCK: The file descriptor fd refers to a socket " - "and has been marked nonblocking (O_NONBLOCK), and the " - "write would block."; + // case EWOULDBLOCK: + // return "EWOULDBLOCK: The file descriptor fd refers to a socket " + // "and has been marked nonblocking (O_NONBLOCK), and the " + // "write would block."; case EBADF: return "EBADF: fd is not a valid file descriptor or is not open for writing."; case EDESTADDRREQ: From 7299f69e425fa8d60eb38ba582fb4a2bfa86d182 Mon Sep 17 00:00:00 2001 From: Pierre Desreumaux Date: Tue, 18 Jun 2019 11:18:51 +0200 Subject: [PATCH 22/22] delete HEAD comment --- src/dynamixel/controllers/usb2dynamixel.hpp | 12 +-- src/dynamixel/servos/servo.hpp | 90 --------------------- src/tools/utility.hpp | 6 -- 3 files changed, 7 insertions(+), 101 deletions(-) diff --git a/src/dynamixel/controllers/usb2dynamixel.hpp b/src/dynamixel/controllers/usb2dynamixel.hpp index a86bdbdf..3b6c6bb8 100644 --- a/src/dynamixel/controllers/usb2dynamixel.hpp +++ b/src/dynamixel/controllers/usb2dynamixel.hpp @@ -31,11 +31,13 @@ namespace dynamixel { return "EAGAIN: The file descriptor fd refers to a file other than " "a socket and has been marked nonblocking (O_NONBLOCK), and " "the write would block."; - // On some OS, EWOULDBLOCK has the same value as EAGAIN and would hence not compile. - // case EWOULDBLOCK: - // return "EWOULDBLOCK: The file descriptor fd refers to a socket " - // "and has been marked nonblocking (O_NONBLOCK), and the " - // "write would block."; + // On some OS, EWOULDBLOCK has the same value as EAGAIN and would hence not compile. 
+#ifdef __APPLE__ + case EWOULDBLOCK: + return "EWOULDBLOCK: The file descriptor fd refers to a socket " + "and has been marked nonblocking (O_NONBLOCK), and the " + "write would block."; +#endif case EBADF: return "EBADF: fd is not a valid file descriptor or is not open for writing."; case EDESTADDRREQ: diff --git a/src/dynamixel/servos/servo.hpp b/src/dynamixel/servos/servo.hpp index 74a96232..e047bc2f 100644 --- a/src/dynamixel/servos/servo.hpp +++ b/src/dynamixel/servos/servo.hpp @@ -246,96 +246,6 @@ namespace dynamixel { return Model::parse_present_position_angle(this->_id, st); } -<<<<<<< HEAD - // static InstructionPacket::protocol_t> get_current_positions_all(typename Servo::protocol_t::id_t id, const std::vector ids) - // { - // typename Servo::ct_t::present_position_t pos; - // std::vector address; - // std::vector data_length; - // for (size_t i = 0; i < ids.size(); i++) { - // address.push_back(pos); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - // data_length.push_back(sizeof(pos)); - // } - // return bulk_read_t(_get_typed(ids), address, data_length); - // } - // - // InstructionPacket::protocol_t> get_current_positions_all(const std::vector ids) const override - // { - // return Model::get_current_positions_all(this->_id, ids); - // } - - // Sync operations. Only works if the models are known and they are all the same - // use case : std::make_shared(0)->set_goal_positions(ids, angles)); replace MODEL_SERVO by Mx28 or Xl320... 
- template - static InstructionPacket set_goal_positions_angle(const std::vector& ids, const std::vector& angles) - { - if (ids.size() != angles.size()) - throw errors::VectorSizesDifferError("set_goal_positions_angle", "ids", "angles"); - // Convert from radians to ticks (after going through degrees) - std::vector positions; - for (auto pos : angles) - positions.push_back((((pos * 57.2958) - ct_t::min_goal_angle_deg) * (ct_t::max_goal_position - ct_t::min_goal_position) / (ct_t::max_goal_angle_deg - ct_t::min_goal_angle_deg)) + ct_t::min_goal_position); - // Pack the angles into bytes (for sending) - std::vector> packed(positions.size()); - for (auto tick_pos : positions) - packed.push_back(protocol_t::pack_data((typename ct_t::goal_position_t)tick_pos)); - - return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); - } - - template - static InstructionPacket set_goal_positions(const std::vector& ids, const std::vector& pos) - { - if (ids.size() != pos.size()) - throw errors::VectorSizesDifferError("set_goal_positions", "ids", "pos"); - std::vector> packed(pos.size()); - for (size_t i = 0; i < pos.size(); i++) - packed[i] = protocol_t::pack_data((typename ct_t::goal_position_t)pos[i]); - - return sync_write_t(ct_t::goal_position, _get_typed(ids), packed); - } - - // Bulk operations. Only works for MX models with protocol 1. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_MX(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x24); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x02); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - - // Bulk operations. Only works for XL models with protocol 2. 
Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_XL(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x25); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x02); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - - // Bulk operations. Only works for XM models with protocol 2. Only works if the models are known and they are all the same - template - static InstructionPacket get_current_positions_XM(const std::vector& ids) - { - std::vector address; - std::vector data_length; - for (size_t i = 0; i < ids.size(); i++) { - address.push_back(0x84); // adress 36 on MX models (0x24) -- adress 37 on XL models (0x25) - data_length.push_back(0x04); - } - return bulk_read_t(_get_typed(ids), address, data_length); - } - -======= ->>>>>>> dev // ================================================================= // Speed-specific diff --git a/src/tools/utility.hpp b/src/tools/utility.hpp index 86d23f3f..b3476132 100644 --- a/src/tools/utility.hpp +++ b/src/tools/utility.hpp @@ -433,11 +433,6 @@ namespace dynamixel { throw errors::UtilityError("set_position(vector, vector): the " "vectors of IDs and angles should have " "the same length"); -<<<<<<< HEAD - _serial_interface.send( - std::make_shared(0)->set_goal_positions_angle(ids, angles)); //Mx28 - -======= std::vector ids_cast; for (int i = 0; i < ids.size(); i++) { ids_cast.push_back((uint8_t)ids[i]); @@ -445,7 +440,6 @@ namespace dynamixel { // _serial_interface.send( // std::make_shared(0)->set_goal_positions(ids, angles)); //Mx28 _serial_interface.send(_servos.at(ids[0])->sync_goal_position_angle(ids_cast, angles)); ->>>>>>> dev StatusPacket status; for (int i = 0; i < ids.size(); i++) { _serial_interface.recv(status);